Fix warning with -Wsign-compare -Wsystem-headers
[official-gcc.git] / gcc / builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
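
/* Illustrative examples (editor's note, not part of the original file):
     is_builtin_name ("__builtin_memcpy")    -> true
     is_builtin_name ("__sync_synchronize")  -> true
     is_builtin_name ("__atomic_load_n")     -> true
     is_builtin_name ("memcpy")              -> false  */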
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
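
/* Worked example of the invariant above (editor's sketch): if an object
   is known to sit 4 bytes past a 16-byte boundary, get_object_alignment_1
   yields *alignp = 128 and *bitposp = 32 (both in bits), and
   get_object_alignment returns least_bit_hwi (32) = 32, i.e. the address
   itself is only guaranteed to be 4-byte aligned.  */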
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
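
/* Illustrative examples (editor's note): with ELTSIZE == 1,
   string_length ("ab\0cd", 1, 5) returns 2; with ELTSIZE == 2 and the
   six bytes { 'a', 0, 'b', 0, 0, 0 }, string_length (buf, 2, 3) returns
   2 because the third two-byte element is all zeros.  */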
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  HOST_WIDE_INT maxelts = strelts;
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      {
	maxelts = tree_to_uhwi (size);
	maxelts = maxelts / eltsize - 1;
      }

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);
      if (len < strelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len * eltsize));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts * eltsize), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}

      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
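
/* Illustrative examples (editor's note): for SRC folded from the literal
   "hello", c_strlen (src, 1) yields ssize_int (5).  For "foo\0bar"
   accessed at a non-constant offset, NULL_TREE is returned above because
   the embedded NUL makes the length offset-dependent.  */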
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
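
/* Illustrative example (editor's note): on a little-endian target,
   c_readstr ("abcd", SImode) produces the 32-bit constant 0x64636261,
   i.e. the string bytes laid out in target memory order.  */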
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
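
/* Layout of the buffer written above (editor's note, in Pmode words):
     word 0   frame pointer (targetm.builtin_setjmp_frame_value)
     word 1   address of RECEIVER_LABEL
     word 2+  machine-dependent stack save area (sa_mode)  */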
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if the const call expr argument iterator ITER has not yet
   reached the end of the argument list.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
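
/* Usage sketch (editor's note): a memcpy-style builtin is checked with
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE);
   while a trailing 0 permits any further arguments, as in the
   validate_arglist (exp, POINTER_TYPE, 0) call used by
   expand_builtin_prefetch below.  */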
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
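
/* Source-level view of what is expanded above (editor's sketch):
     __builtin_prefetch (p);        // rw = 0 (read), locality = 3
     __builtin_prefetch (p, 1, 0);  // write hint, no temporal locality
   Non-constant rw/locality arguments are diagnosed and replaced by zero
   before expansion.  */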
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
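
/* Worked example of the rounding above (editor's note): if SIZE is 20
   and the next register mode requires 16-byte alignment, then
   size = CEIL (20, 16) * 16 == 32 before GET_MODE_SIZE is added.  */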
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
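
/* Source-level usage these expanders implement (editor's sketch, using
   the documented GCC extension):
     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*)()) fn, args, 64);
     __builtin_return (res);
   where 64 is the caller's estimate of the stack argument size.  */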
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1761 /* Perform an untyped return. */
1763 static void
1764 expand_builtin_return (rtx result)
1766 int size, align, regno;
1767 fixed_size_mode mode;
1768 rtx reg;
1769 rtx_insn *call_fusage = 0;
1771 result = convert_memory_address (Pmode, result);
1773 apply_result_size ();
1774 result = gen_rtx_MEM (BLKmode, result);
1776 if (targetm.have_untyped_return ())
1778 rtx vector = result_vector (0, result);
1779 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1780 emit_barrier ();
1781 return;
1784 /* Restore the return value and note that each value is used. */
1785 size = 0;
1786 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1787 if ((mode = apply_result_mode[regno]) != VOIDmode)
1789 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1790 if (size % align != 0)
1791 size = CEIL (size, align) * align;
1792 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1793 emit_move_insn (reg, adjust_address (result, mode, size));
1795 push_to_sequence (call_fusage);
1796 emit_use (reg);
1797 call_fusage = get_insns ();
1798 end_sequence ();
1799 size += GET_MODE_SIZE (mode);
1802 /* Put the USE insns before the return. */
1803 emit_insn (call_fusage);
1805 /* Return whatever values were restored by jumping directly to the end
1806 of the function. */
1807 expand_naked_return ();
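/* Illustrative sketch (an addition, not original source): the untyped
   apply/return builtins expanded above work together to forward a call
   with unknown arguments, as in user code such as

     void *wrap (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where `target_fn' and the 64-byte argument-block size are
   hypothetical placeholders; expand_builtin_apply expands the middle
   call and expand_builtin_return the last one.  */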
1810 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1812 static enum type_class
1813 type_to_class (tree type)
1815 switch (TREE_CODE (type))
1817 case VOID_TYPE: return void_type_class;
1818 case INTEGER_TYPE: return integer_type_class;
1819 case ENUMERAL_TYPE: return enumeral_type_class;
1820 case BOOLEAN_TYPE: return boolean_type_class;
1821 case POINTER_TYPE: return pointer_type_class;
1822 case REFERENCE_TYPE: return reference_type_class;
1823 case OFFSET_TYPE: return offset_type_class;
1824 case REAL_TYPE: return real_type_class;
1825 case COMPLEX_TYPE: return complex_type_class;
1826 case FUNCTION_TYPE: return function_type_class;
1827 case METHOD_TYPE: return method_type_class;
1828 case RECORD_TYPE: return record_type_class;
1829 case UNION_TYPE:
1830 case QUAL_UNION_TYPE: return union_type_class;
1831 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1832 ? string_type_class : array_type_class);
1833 case LANG_TYPE: return lang_type_class;
1834 default: return no_type_class;
1838 /* Expand a call EXP to __builtin_classify_type. */
1840 static rtx
1841 expand_builtin_classify_type (tree exp)
1843 if (call_expr_nargs (exp))
1844 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1845 return GEN_INT (no_type_class);
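/* Illustrative sketch (an addition, not original source): the
   classification is purely static, so for example

     int k1 = __builtin_classify_type (1.5);     /+ real_type_class +/
     int k2 = __builtin_classify_type ("abc");   /+ pointer_type_class +/

   (comment markers written as /+ +/ to keep this block well formed)
   both fold to constants produced by type_to_class above, with the
   numeric values defined by enum type_class in typeclass.h.  */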
1848 /* This helper macro, meant to be used in mathfn_built_in below, determines
1849 which among a set of builtin math functions is appropriate for a given type
1850 mode. The `F' (float) and `L' (long double) are automatically generated
1851 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1852 types, additional variants with 'F32', 'F64',
1853 'F128', etc. suffixes are also considered. */
1854 #define CASE_MATHFN(MATHFN) \
1855 CASE_CFN_##MATHFN: \
1856 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1857 fcodel = BUILT_IN_##MATHFN##L ; break;
1858 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1859 types. */
1860 #define CASE_MATHFN_FLOATN(MATHFN) \
1861 CASE_CFN_##MATHFN: \
1862 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1863 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1864 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1865 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1866 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1867 break;
1868 /* Similar to above, but appends _R after any F/L suffix. */
1869 #define CASE_MATHFN_REENT(MATHFN) \
1870 case CFN_BUILT_IN_##MATHFN##_R: \
1871 case CFN_BUILT_IN_##MATHFN##F_R: \
1872 case CFN_BUILT_IN_##MATHFN##L_R: \
1873 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1874 fcodel = BUILT_IN_##MATHFN##L_R ; break;
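/* For illustration (an addition, not original source), a use such as
   CASE_MATHFN (ACOS) in the switch below expands roughly to

     CASE_CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   so each case label fills in the double/float/long double codes for
   one math function family in a single line.  */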
1876 /* Return a function equivalent to FN but operating on floating-point
1877 values of type TYPE, or END_BUILTINS if no such function exists.
1878 This is purely an operation on function codes; it does not guarantee
1879 that the target actually has an implementation of the function. */
1881 static built_in_function
1882 mathfn_built_in_2 (tree type, combined_fn fn)
1884 tree mtype;
1885 built_in_function fcode, fcodef, fcodel;
1886 built_in_function fcodef16 = END_BUILTINS;
1887 built_in_function fcodef32 = END_BUILTINS;
1888 built_in_function fcodef64 = END_BUILTINS;
1889 built_in_function fcodef128 = END_BUILTINS;
1890 built_in_function fcodef32x = END_BUILTINS;
1891 built_in_function fcodef64x = END_BUILTINS;
1892 built_in_function fcodef128x = END_BUILTINS;
1894 switch (fn)
1896 CASE_MATHFN (ACOS)
1897 CASE_MATHFN (ACOSH)
1898 CASE_MATHFN (ASIN)
1899 CASE_MATHFN (ASINH)
1900 CASE_MATHFN (ATAN)
1901 CASE_MATHFN (ATAN2)
1902 CASE_MATHFN (ATANH)
1903 CASE_MATHFN (CBRT)
1904 CASE_MATHFN_FLOATN (CEIL)
1905 CASE_MATHFN (CEXPI)
1906 CASE_MATHFN_FLOATN (COPYSIGN)
1907 CASE_MATHFN (COS)
1908 CASE_MATHFN (COSH)
1909 CASE_MATHFN (DREM)
1910 CASE_MATHFN (ERF)
1911 CASE_MATHFN (ERFC)
1912 CASE_MATHFN (EXP)
1913 CASE_MATHFN (EXP10)
1914 CASE_MATHFN (EXP2)
1915 CASE_MATHFN (EXPM1)
1916 CASE_MATHFN (FABS)
1917 CASE_MATHFN (FDIM)
1918 CASE_MATHFN_FLOATN (FLOOR)
1919 CASE_MATHFN_FLOATN (FMA)
1920 CASE_MATHFN_FLOATN (FMAX)
1921 CASE_MATHFN_FLOATN (FMIN)
1922 CASE_MATHFN (FMOD)
1923 CASE_MATHFN (FREXP)
1924 CASE_MATHFN (GAMMA)
1925 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1926 CASE_MATHFN (HUGE_VAL)
1927 CASE_MATHFN (HYPOT)
1928 CASE_MATHFN (ILOGB)
1929 CASE_MATHFN (ICEIL)
1930 CASE_MATHFN (IFLOOR)
1931 CASE_MATHFN (INF)
1932 CASE_MATHFN (IRINT)
1933 CASE_MATHFN (IROUND)
1934 CASE_MATHFN (ISINF)
1935 CASE_MATHFN (J0)
1936 CASE_MATHFN (J1)
1937 CASE_MATHFN (JN)
1938 CASE_MATHFN (LCEIL)
1939 CASE_MATHFN (LDEXP)
1940 CASE_MATHFN (LFLOOR)
1941 CASE_MATHFN (LGAMMA)
1942 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1943 CASE_MATHFN (LLCEIL)
1944 CASE_MATHFN (LLFLOOR)
1945 CASE_MATHFN (LLRINT)
1946 CASE_MATHFN (LLROUND)
1947 CASE_MATHFN (LOG)
1948 CASE_MATHFN (LOG10)
1949 CASE_MATHFN (LOG1P)
1950 CASE_MATHFN (LOG2)
1951 CASE_MATHFN (LOGB)
1952 CASE_MATHFN (LRINT)
1953 CASE_MATHFN (LROUND)
1954 CASE_MATHFN (MODF)
1955 CASE_MATHFN (NAN)
1956 CASE_MATHFN (NANS)
1957 CASE_MATHFN_FLOATN (NEARBYINT)
1958 CASE_MATHFN (NEXTAFTER)
1959 CASE_MATHFN (NEXTTOWARD)
1960 CASE_MATHFN (POW)
1961 CASE_MATHFN (POWI)
1962 CASE_MATHFN (POW10)
1963 CASE_MATHFN (REMAINDER)
1964 CASE_MATHFN (REMQUO)
1965 CASE_MATHFN_FLOATN (RINT)
1966 CASE_MATHFN_FLOATN (ROUND)
1967 CASE_MATHFN (SCALB)
1968 CASE_MATHFN (SCALBLN)
1969 CASE_MATHFN (SCALBN)
1970 CASE_MATHFN (SIGNBIT)
1971 CASE_MATHFN (SIGNIFICAND)
1972 CASE_MATHFN (SIN)
1973 CASE_MATHFN (SINCOS)
1974 CASE_MATHFN (SINH)
1975 CASE_MATHFN_FLOATN (SQRT)
1976 CASE_MATHFN (TAN)
1977 CASE_MATHFN (TANH)
1978 CASE_MATHFN (TGAMMA)
1979 CASE_MATHFN_FLOATN (TRUNC)
1980 CASE_MATHFN (Y0)
1981 CASE_MATHFN (Y1)
1982 CASE_MATHFN (YN)
1984 default:
1985 return END_BUILTINS;
1988 mtype = TYPE_MAIN_VARIANT (type);
1989 if (mtype == double_type_node)
1990 return fcode;
1991 else if (mtype == float_type_node)
1992 return fcodef;
1993 else if (mtype == long_double_type_node)
1994 return fcodel;
1995 else if (mtype == float16_type_node)
1996 return fcodef16;
1997 else if (mtype == float32_type_node)
1998 return fcodef32;
1999 else if (mtype == float64_type_node)
2000 return fcodef64;
2001 else if (mtype == float128_type_node)
2002 return fcodef128;
2003 else if (mtype == float32x_type_node)
2004 return fcodef32x;
2005 else if (mtype == float64x_type_node)
2006 return fcodef64x;
2007 else if (mtype == float128x_type_node)
2008 return fcodef128x;
2009 else
2010 return END_BUILTINS;
2013 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2014 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2015 otherwise use the explicit declaration. If we can't do the conversion,
2016 return null. */
2018 static tree
2019 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2021 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2022 if (fcode2 == END_BUILTINS)
2023 return NULL_TREE;
2025 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2026 return NULL_TREE;
2028 return builtin_decl_explicit (fcode2);
2031 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2033 tree
2034 mathfn_built_in (tree type, combined_fn fn)
2036 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2039 /* Like mathfn_built_in_1, but take a built_in_function and
2040 always use the implicit builtin declarations. */
2042 tree
2043 mathfn_built_in (tree type, enum built_in_function fn)
2045 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
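/* Usage sketch (an addition, not original source): a caller that has
   a double builtin and needs the float variant can write, say,

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which, via mathfn_built_in_2, maps BUILT_IN_SQRT to BUILT_IN_SQRTF
   and returns its implicit declaration, or NULL_TREE if the builtin
   is unavailable.  */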
2048 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2049 return its code, otherwise return IFN_LAST. Note that this function
2050 only tests whether the function is defined in internal-fn.def, not whether
2051 it is actually available on the target. */
2053 internal_fn
2054 associated_internal_fn (tree fndecl)
2056 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2057 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2058 switch (DECL_FUNCTION_CODE (fndecl))
2060 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2061 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2062 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2063 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2064 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2065 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2066 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2067 #include "internal-fn.def"
2069 CASE_FLT_FN (BUILT_IN_POW10):
2070 return IFN_EXP10;
2072 CASE_FLT_FN (BUILT_IN_DREM):
2073 return IFN_REMAINDER;
2075 CASE_FLT_FN (BUILT_IN_SCALBN):
2076 CASE_FLT_FN (BUILT_IN_SCALBLN):
2077 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2078 return IFN_LDEXP;
2079 return IFN_LAST;
2081 default:
2082 return IFN_LAST;
2086 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2087 on the current target by a call to an internal function, return the
2088 code of that internal function, otherwise return IFN_LAST. The caller
2089 is responsible for ensuring that any side-effects of the built-in
2090 call are dealt with correctly. E.g. if CALL sets errno, the caller
2091 must decide that the errno result isn't needed or make it available
2092 in some other way. */
2094 internal_fn
2095 replacement_internal_fn (gcall *call)
2097 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2099 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2100 if (ifn != IFN_LAST)
2102 tree_pair types = direct_internal_fn_types (ifn, call);
2103 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2104 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2105 return ifn;
2108 return IFN_LAST;
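/* For example (illustrative, not original source): given a GIMPLE
   call such as

     x = __builtin_sqrtf (y);

   on a target whose sqrt optab is supported for SFmode, the mapping
   above yields IFN_SQRT, so the statement can be rewritten as the
   internal call `x = .SQRT (y)' with no library fallback; when the
   optab is missing, IFN_LAST keeps the ordinary call.  */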
2111 /* Expand a call to the builtin ternary math functions (fma).
2112 Return NULL_RTX if a normal call should be emitted rather than expanding the
2113 function in-line. EXP is the expression that is a call to the builtin
2114 function; if convenient, the result should be placed in TARGET.
2115 SUBTARGET may be used as the target for computing one of EXP's
2116 operands. */
2118 static rtx
2119 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2121 optab builtin_optab;
2122 rtx op0, op1, op2, result;
2123 rtx_insn *insns;
2124 tree fndecl = get_callee_fndecl (exp);
2125 tree arg0, arg1, arg2;
2126 machine_mode mode;
2128 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2129 return NULL_RTX;
2131 arg0 = CALL_EXPR_ARG (exp, 0);
2132 arg1 = CALL_EXPR_ARG (exp, 1);
2133 arg2 = CALL_EXPR_ARG (exp, 2);
2135 switch (DECL_FUNCTION_CODE (fndecl))
2137 CASE_FLT_FN (BUILT_IN_FMA):
2138 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2139 builtin_optab = fma_optab; break;
2140 default:
2141 gcc_unreachable ();
2144 /* Make a suitable register to place result in. */
2145 mode = TYPE_MODE (TREE_TYPE (exp));
2147 /* Before working hard, check whether the instruction is available. */
2148 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2149 return NULL_RTX;
2151 result = gen_reg_rtx (mode);
2153 /* Always stabilize the argument list. */
2154 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2155 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2156 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2158 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2159 op1 = expand_normal (arg1);
2160 op2 = expand_normal (arg2);
2162 start_sequence ();
2164 /* Compute into RESULT.
2165 Set RESULT to wherever the result comes back. */
2166 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2167 result, 0);
2169 /* If we were unable to expand via the builtin, stop the sequence
2170 (without outputting the insns) and emit a call to the library function
2171 with the stabilized argument list. */
2172 if (result == 0)
2174 end_sequence ();
2175 return expand_call (exp, target, target == const0_rtx);
2178 /* Output the entire sequence. */
2179 insns = get_insns ();
2180 end_sequence ();
2181 emit_insn (insns);
2183 return result;
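/* Sketch (an addition, not original source): for

     double r = __builtin_fma (x, y, z);

   the expansion above tries the fma_optab, which computes x * y + z
   with a single rounding step, so on a target with fused multiply-add
   the call becomes one instruction; otherwise expand_call falls back
   to the fma libm routine.  */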
2186 /* Expand a call to the builtin sin and cos math functions.
2187 Return NULL_RTX if a normal call should be emitted rather than expanding the
2188 function in-line. EXP is the expression that is a call to the builtin
2189 function; if convenient, the result should be placed in TARGET.
2190 SUBTARGET may be used as the target for computing one of EXP's
2191 operands. */
2193 static rtx
2194 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2196 optab builtin_optab;
2197 rtx op0;
2198 rtx_insn *insns;
2199 tree fndecl = get_callee_fndecl (exp);
2200 machine_mode mode;
2201 tree arg;
2203 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2204 return NULL_RTX;
2206 arg = CALL_EXPR_ARG (exp, 0);
2208 switch (DECL_FUNCTION_CODE (fndecl))
2210 CASE_FLT_FN (BUILT_IN_SIN):
2211 CASE_FLT_FN (BUILT_IN_COS):
2212 builtin_optab = sincos_optab; break;
2213 default:
2214 gcc_unreachable ();
2217 /* Make a suitable register to place result in. */
2218 mode = TYPE_MODE (TREE_TYPE (exp));
2220 /* Check if the sincos insn is available; otherwise fall back
2221 to the sin or cos insn. */
2222 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2223 switch (DECL_FUNCTION_CODE (fndecl))
2225 CASE_FLT_FN (BUILT_IN_SIN):
2226 builtin_optab = sin_optab; break;
2227 CASE_FLT_FN (BUILT_IN_COS):
2228 builtin_optab = cos_optab; break;
2229 default:
2230 gcc_unreachable ();
2233 /* Before working hard, check whether the instruction is available. */
2234 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2236 rtx result = gen_reg_rtx (mode);
2238 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2239 need to expand the argument again. This way, we will not perform
2240 side-effects more than once. */
2241 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2243 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2245 start_sequence ();
2247 /* Compute into RESULT.
2248 Set RESULT to wherever the result comes back. */
2249 if (builtin_optab == sincos_optab)
2251 int ok;
2253 switch (DECL_FUNCTION_CODE (fndecl))
2255 CASE_FLT_FN (BUILT_IN_SIN):
2256 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2257 break;
2258 CASE_FLT_FN (BUILT_IN_COS):
2259 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2260 break;
2261 default:
2262 gcc_unreachable ();
2264 gcc_assert (ok);
2266 else
2267 result = expand_unop (mode, builtin_optab, op0, result, 0);
2269 if (result != 0)
2271 /* Output the entire sequence. */
2272 insns = get_insns ();
2273 end_sequence ();
2274 emit_insn (insns);
2275 return result;
2278 /* If we were unable to expand via the builtin, stop the sequence
2279 (without outputting the insns) and emit a call to the library function
2280 with the stabilized argument list. */
2281 end_sequence ();
2284 return expand_call (exp, target, target == const0_rtx);
2287 /* Given an interclass math builtin decl FNDECL and its argument ARG
2288 return an RTL instruction code that implements the functionality.
2289 If that isn't possible or available return CODE_FOR_nothing. */
2291 static enum insn_code
2292 interclass_mathfn_icode (tree arg, tree fndecl)
2294 bool errno_set = false;
2295 optab builtin_optab = unknown_optab;
2296 machine_mode mode;
2298 switch (DECL_FUNCTION_CODE (fndecl))
2300 CASE_FLT_FN (BUILT_IN_ILOGB):
2301 errno_set = true; builtin_optab = ilogb_optab; break;
2302 CASE_FLT_FN (BUILT_IN_ISINF):
2303 builtin_optab = isinf_optab; break;
2304 case BUILT_IN_ISNORMAL:
2305 case BUILT_IN_ISFINITE:
2306 CASE_FLT_FN (BUILT_IN_FINITE):
2307 case BUILT_IN_FINITED32:
2308 case BUILT_IN_FINITED64:
2309 case BUILT_IN_FINITED128:
2310 case BUILT_IN_ISINFD32:
2311 case BUILT_IN_ISINFD64:
2312 case BUILT_IN_ISINFD128:
2313 /* These builtins have no optabs (yet). */
2314 break;
2315 default:
2316 gcc_unreachable ();
2319 /* There's no easy way to detect the case we need to set EDOM. */
2320 if (flag_errno_math && errno_set)
2321 return CODE_FOR_nothing;
2323 /* Optab mode depends on the mode of the input argument. */
2324 mode = TYPE_MODE (TREE_TYPE (arg));
2326 if (builtin_optab)
2327 return optab_handler (builtin_optab, mode);
2328 return CODE_FOR_nothing;
2331 /* Expand a call to one of the builtin math functions that operate on
2332 a floating-point argument and output an integer result (ilogb, isinf,
2333 isnan, etc).
2334 Return 0 if a normal call should be emitted rather than expanding the
2335 function in-line. EXP is the expression that is a call to the builtin
2336 function; if convenient, the result should be placed in TARGET. */
2338 static rtx
2339 expand_builtin_interclass_mathfn (tree exp, rtx target)
2341 enum insn_code icode = CODE_FOR_nothing;
2342 rtx op0;
2343 tree fndecl = get_callee_fndecl (exp);
2344 machine_mode mode;
2345 tree arg;
2347 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2348 return NULL_RTX;
2350 arg = CALL_EXPR_ARG (exp, 0);
2351 icode = interclass_mathfn_icode (arg, fndecl);
2352 mode = TYPE_MODE (TREE_TYPE (arg));
2354 if (icode != CODE_FOR_nothing)
2356 struct expand_operand ops[1];
2357 rtx_insn *last = get_last_insn ();
2358 tree orig_arg = arg;
2360 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2361 need to expand the argument again. This way, we will not perform
2362 side-effects more than once. */
2363 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2365 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2367 if (mode != GET_MODE (op0))
2368 op0 = convert_to_mode (mode, op0, 0);
2370 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2371 if (maybe_legitimize_operands (icode, 0, 1, ops)
2372 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2373 return ops[0].value;
2375 delete_insns_since (last);
2376 CALL_EXPR_ARG (exp, 0) = orig_arg;
2379 return NULL_RTX;
2382 /* Expand a call to the builtin sincos math function.
2383 Return NULL_RTX if a normal call should be emitted rather than expanding the
2384 function in-line. EXP is the expression that is a call to the builtin
2385 function. */
2387 static rtx
2388 expand_builtin_sincos (tree exp)
2390 rtx op0, op1, op2, target1, target2;
2391 machine_mode mode;
2392 tree arg, sinp, cosp;
2393 int result;
2394 location_t loc = EXPR_LOCATION (exp);
2395 tree alias_type, alias_off;
2397 if (!validate_arglist (exp, REAL_TYPE,
2398 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2399 return NULL_RTX;
2401 arg = CALL_EXPR_ARG (exp, 0);
2402 sinp = CALL_EXPR_ARG (exp, 1);
2403 cosp = CALL_EXPR_ARG (exp, 2);
2405 /* Make a suitable register to place result in. */
2406 mode = TYPE_MODE (TREE_TYPE (arg));
2408 /* Check if sincos insn is available, otherwise emit the call. */
2409 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2410 return NULL_RTX;
2412 target1 = gen_reg_rtx (mode);
2413 target2 = gen_reg_rtx (mode);
2415 op0 = expand_normal (arg);
2416 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2417 alias_off = build_int_cst (alias_type, 0);
2418 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2419 sinp, alias_off));
2420 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2421 cosp, alias_off));
2423 /* Compute into target1 and target2.
2424 Set TARGET to wherever the result comes back. */
2425 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2426 gcc_assert (result);
2428 /* Move target1 and target2 to the memory locations indicated
2429 by op1 and op2. */
2430 emit_move_insn (op1, target1);
2431 emit_move_insn (op2, target2);
2433 return const0_rtx;
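/* Usage sketch (illustrative, not original source): this expands
   GNU-style calls such as

     double s, c;
     sincos (x, &s, &c);

   into a single sincos-optab instruction that writes both results,
   which is why the expansion returns const0_rtx -- the builtin's
   value is void and only the two stores matter.  */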
2436 /* Expand a call to the internal cexpi builtin via the sincos math function.
2437 EXP is the expression that is a call to the builtin function; if convenient,
2438 the result should be placed in TARGET. */
2440 static rtx
2441 expand_builtin_cexpi (tree exp, rtx target)
2443 tree fndecl = get_callee_fndecl (exp);
2444 tree arg, type;
2445 machine_mode mode;
2446 rtx op0, op1, op2;
2447 location_t loc = EXPR_LOCATION (exp);
2449 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2450 return NULL_RTX;
2452 arg = CALL_EXPR_ARG (exp, 0);
2453 type = TREE_TYPE (arg);
2454 mode = TYPE_MODE (TREE_TYPE (arg));
2456 /* Try expanding via a sincos optab, fall back to emitting a libcall
2457 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2458 is only generated from sincos or cexp, or when either of them is available. */
2459 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2461 op1 = gen_reg_rtx (mode);
2462 op2 = gen_reg_rtx (mode);
2464 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466 /* Compute into op1 and op2. */
2467 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2469 else if (targetm.libc_has_function (function_sincos))
2471 tree call, fn = NULL_TREE;
2472 tree top1, top2;
2473 rtx op1a, op2a;
2475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2476 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2478 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2480 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2481 else
2482 gcc_unreachable ();
2484 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2485 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2486 op1a = copy_addr_to_reg (XEXP (op1, 0));
2487 op2a = copy_addr_to_reg (XEXP (op2, 0));
2488 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2489 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2491 /* Make sure not to fold the sincos call again. */
2492 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2493 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2494 call, 3, arg, top1, top2));
2496 else
2498 tree call, fn = NULL_TREE, narg;
2499 tree ctype = build_complex_type (type);
2501 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2502 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2504 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2506 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2507 else
2508 gcc_unreachable ();
2510 /* If we don't have a decl for cexp, create one. This is the
2511 friendliest fallback if the user calls __builtin_cexpi
2512 without full C99 function support on the target. */
2513 if (fn == NULL_TREE)
2515 tree fntype;
2516 const char *name = NULL;
2518 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2519 name = "cexpf";
2520 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2521 name = "cexp";
2522 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2523 name = "cexpl";
2525 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2526 fn = build_fn_decl (name, fntype);
2529 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2530 build_real (type, dconst0), arg);
2532 /* Make sure not to fold the cexp call again. */
2533 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2534 return expand_expr (build_call_nary (ctype, call, 1, narg),
2535 target, VOIDmode, EXPAND_NORMAL);
2538 /* Now build the proper return type. */
2539 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2540 make_tree (TREE_TYPE (arg), op2),
2541 make_tree (TREE_TYPE (arg), op1)),
2542 target, VOIDmode, EXPAND_NORMAL);
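/* Sketch (an addition, not original source): __builtin_cexpi (x)
   computes cos (x) + i*sin (x), so a call such as

     _Complex double z = __builtin_cexpi (x);

   is lowered here to a sincos optab insn, a sincos () libcall, or a
   cexp () libcall, and the COMPLEX_EXPR built above glues the real
   (cos) and imaginary (sin) parts back together.  */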
2545 /* Conveniently construct a function call expression. FNDECL names the
2546 function to be called, N is the number of arguments, and the "..."
2547 parameters are the argument expressions. Unlike build_call_expr
2548 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2550 static tree
2551 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2553 va_list ap;
2554 tree fntype = TREE_TYPE (fndecl);
2555 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2557 va_start (ap, n);
2558 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2559 va_end (ap);
2560 SET_EXPR_LOCATION (fn, loc);
2561 return fn;
2564 /* Expand a call to one of the builtin rounding functions gcc defines
2565 as an extension (lfloor and lceil). As these are gcc extensions we
2566 do not need to worry about setting errno to EDOM.
2567 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2568 EXP is the expression that is a call to the builtin function;
2569 if convenient, the result should be placed in TARGET. */
2571 static rtx
2572 expand_builtin_int_roundingfn (tree exp, rtx target)
2574 convert_optab builtin_optab;
2575 rtx op0, tmp;
2576 rtx_insn *insns;
2577 tree fndecl = get_callee_fndecl (exp);
2578 enum built_in_function fallback_fn;
2579 tree fallback_fndecl;
2580 machine_mode mode;
2581 tree arg;
2583 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2584 gcc_unreachable ();
2586 arg = CALL_EXPR_ARG (exp, 0);
2588 switch (DECL_FUNCTION_CODE (fndecl))
2590 CASE_FLT_FN (BUILT_IN_ICEIL):
2591 CASE_FLT_FN (BUILT_IN_LCEIL):
2592 CASE_FLT_FN (BUILT_IN_LLCEIL):
2593 builtin_optab = lceil_optab;
2594 fallback_fn = BUILT_IN_CEIL;
2595 break;
2597 CASE_FLT_FN (BUILT_IN_IFLOOR):
2598 CASE_FLT_FN (BUILT_IN_LFLOOR):
2599 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2600 builtin_optab = lfloor_optab;
2601 fallback_fn = BUILT_IN_FLOOR;
2602 break;
2604 default:
2605 gcc_unreachable ();
2608 /* Make a suitable register to place result in. */
2609 mode = TYPE_MODE (TREE_TYPE (exp));
2611 target = gen_reg_rtx (mode);
2613 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2614 need to expand the argument again. This way, we will not perform
2615 side-effects more than once. */
2616 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2618 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2620 start_sequence ();
2622 /* Compute into TARGET. */
2623 if (expand_sfix_optab (target, op0, builtin_optab))
2625 /* Output the entire sequence. */
2626 insns = get_insns ();
2627 end_sequence ();
2628 emit_insn (insns);
2629 return target;
2632 /* If we were unable to expand via the builtin, stop the sequence
2633 (without outputting the insns). */
2634 end_sequence ();
2636 /* Fall back to floating point rounding optab. */
2637 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2639 /* For non-C99 targets we may end up without a fallback fndecl here
2640 if the user called __builtin_lfloor directly. In this case emit
2641 a call to the floor/ceil variants nevertheless. This should result
2642 in the best user experience for targets without full C99 support. */
2643 if (fallback_fndecl == NULL_TREE)
2645 tree fntype;
2646 const char *name = NULL;
2648 switch (DECL_FUNCTION_CODE (fndecl))
2650 case BUILT_IN_ICEIL:
2651 case BUILT_IN_LCEIL:
2652 case BUILT_IN_LLCEIL:
2653 name = "ceil";
2654 break;
2655 case BUILT_IN_ICEILF:
2656 case BUILT_IN_LCEILF:
2657 case BUILT_IN_LLCEILF:
2658 name = "ceilf";
2659 break;
2660 case BUILT_IN_ICEILL:
2661 case BUILT_IN_LCEILL:
2662 case BUILT_IN_LLCEILL:
2663 name = "ceill";
2664 break;
2665 case BUILT_IN_IFLOOR:
2666 case BUILT_IN_LFLOOR:
2667 case BUILT_IN_LLFLOOR:
2668 name = "floor";
2669 break;
2670 case BUILT_IN_IFLOORF:
2671 case BUILT_IN_LFLOORF:
2672 case BUILT_IN_LLFLOORF:
2673 name = "floorf";
2674 break;
2675 case BUILT_IN_IFLOORL:
2676 case BUILT_IN_LFLOORL:
2677 case BUILT_IN_LLFLOORL:
2678 name = "floorl";
2679 break;
2680 default:
2681 gcc_unreachable ();
2684 fntype = build_function_type_list (TREE_TYPE (arg),
2685 TREE_TYPE (arg), NULL_TREE);
2686 fallback_fndecl = build_fn_decl (name, fntype);
2689 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2691 tmp = expand_normal (exp);
2692 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2694 /* Truncate the result of floating point optab to integer
2695 via expand_fix (). */
2696 target = gen_reg_rtx (mode);
2697 expand_fix (target, tmp, 0);
2699 return target;
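/* Sketch (illustrative, not original source): on a target without an
   lfloor optab, a call such as

     long n = __builtin_lfloor (x);

   goes through the fallback path above: it becomes floor (x) followed
   by expand_fix, i.e. roughly (long) floor (x), matching the lowering
   described in the function comment.  */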
2702 /* Expand a call to one of the builtin math functions doing integer
2703 conversion (lrint).
2704 Return 0 if a normal call should be emitted rather than expanding the
2705 function in-line. EXP is the expression that is a call to the builtin
2706 function; if convenient, the result should be placed in TARGET. */
2708 static rtx
2709 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2711 convert_optab builtin_optab;
2712 rtx op0;
2713 rtx_insn *insns;
2714 tree fndecl = get_callee_fndecl (exp);
2715 tree arg;
2716 machine_mode mode;
2717 enum built_in_function fallback_fn = BUILT_IN_NONE;
2719 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2720 gcc_unreachable ();
2722 arg = CALL_EXPR_ARG (exp, 0);
2724 switch (DECL_FUNCTION_CODE (fndecl))
2726 CASE_FLT_FN (BUILT_IN_IRINT):
2727 fallback_fn = BUILT_IN_LRINT;
2728 gcc_fallthrough ();
2729 CASE_FLT_FN (BUILT_IN_LRINT):
2730 CASE_FLT_FN (BUILT_IN_LLRINT):
2731 builtin_optab = lrint_optab;
2732 break;
2734 CASE_FLT_FN (BUILT_IN_IROUND):
2735 fallback_fn = BUILT_IN_LROUND;
2736 gcc_fallthrough ();
2737 CASE_FLT_FN (BUILT_IN_LROUND):
2738 CASE_FLT_FN (BUILT_IN_LLROUND):
2739 builtin_optab = lround_optab;
2740 break;
2742 default:
2743 gcc_unreachable ();
2746 /* There's no easy way to detect the case we need to set EDOM. */
2747 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2748 return NULL_RTX;
2750 /* Make a suitable register to place result in. */
2751 mode = TYPE_MODE (TREE_TYPE (exp));
2753 /* There's no easy way to detect the case we need to set EDOM. */
2754 if (!flag_errno_math)
2756 rtx result = gen_reg_rtx (mode);
2758 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2759 need to expand the argument again. This way, we will not perform
2760 side-effects more than once. */
2761 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2763 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2765 start_sequence ();
2767 if (expand_sfix_optab (result, op0, builtin_optab))
2769 /* Output the entire sequence. */
2770 insns = get_insns ();
2771 end_sequence ();
2772 emit_insn (insns);
2773 return result;
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns) and emit a call to the library function
2778 with the stabilized argument list. */
2779 end_sequence ();
2782 if (fallback_fn != BUILT_IN_NONE)
2784 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2785 targets, (int) round (x) should never be transformed into
2786 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2787 a call to lround in the hope that the target provides at least some
2788 C99 functions. This should result in the best user experience for
2789 targets without full C99 support. */
2790 tree fallback_fndecl = mathfn_built_in_1
2791 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2793 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2794 fallback_fndecl, 1, arg);
2796 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2797 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2798 return convert_to_mode (mode, target, 0);
2801 return expand_call (exp, target, target == const0_rtx);
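/* Sketch (an addition, not original source): with -fno-math-errno a
   call such as

     int i = __builtin_irint (x);

   first tries the lrint optab directly; failing that, the fallback
   above rewrites it as a call to lrint and converts the result back
   to int mode, per the BUILT_IN_IRINT -> BUILT_IN_LRINT mapping in
   the switch.  */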
2804 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2805 a normal call should be emitted rather than expanding the function
2806 in-line. EXP is the expression that is a call to the builtin
2807 function; if convenient, the result should be placed in TARGET. */
2809 static rtx
2810 expand_builtin_powi (tree exp, rtx target)
2812 tree arg0, arg1;
2813 rtx op0, op1;
2814 machine_mode mode;
2815 machine_mode mode2;
2817 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2818 return NULL_RTX;
2820 arg0 = CALL_EXPR_ARG (exp, 0);
2821 arg1 = CALL_EXPR_ARG (exp, 1);
2822 mode = TYPE_MODE (TREE_TYPE (exp));
2824 /* Emit a libcall to libgcc. */
2826 /* Mode of the 2nd argument must match that of an int. */
2827 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2829 if (target == NULL_RTX)
2830 target = gen_reg_rtx (mode);
2832 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2833 if (GET_MODE (op0) != mode)
2834 op0 = convert_to_mode (mode, op0, 0);
2835 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2836 if (GET_MODE (op1) != mode2)
2837 op1 = convert_to_mode (mode2, op1, 0);
2839 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2840 target, LCT_CONST, mode,
2841 op0, mode, op1, mode2);
2843 return target;
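/* Sketch (illustrative, not original source): there is no inline
   expansion here; for example

     double r = __builtin_powi (x, 5);

   simply becomes a call to the powi libgcc routine (__powidf2 for
   DFmode) with the exponent passed in int mode, as set up above.  */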
2846 /* Expand expression EXP which is a call to the strlen builtin. Return
2847 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2848 try to get the result in TARGET, if convenient. */
2850 static rtx
2851 expand_builtin_strlen (tree exp, rtx target,
2852 machine_mode target_mode)
2854 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2855 return NULL_RTX;
2857 struct expand_operand ops[4];
2858 rtx pat;
2859 tree len;
2860 tree src = CALL_EXPR_ARG (exp, 0);
2861 rtx src_reg;
2862 rtx_insn *before_strlen;
2863 machine_mode insn_mode;
2864 enum insn_code icode = CODE_FOR_nothing;
2865 unsigned int align;
2867 /* If the length can be computed at compile-time, return it. */
2868 len = c_strlen (src, 0);
2869 if (len)
2870 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2872 /* If the length can be computed at compile-time and is a constant
2873 integer, but there are side-effects in src, evaluate
2874 src for side-effects, then return len.
2875 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2876 can be optimized into: i++; x = 3; */
2877 len = c_strlen (src, 1);
2878 if (len && TREE_CODE (len) == INTEGER_CST)
2880 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2881 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2884 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2886 /* If SRC is not a pointer type, don't do this operation inline. */
2887 if (align == 0)
2888 return NULL_RTX;
2890 /* Bail out if we can't compute strlen in the right mode. */
2891 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2893 icode = optab_handler (strlen_optab, insn_mode);
2894 if (icode != CODE_FOR_nothing)
2895 break;
2897 if (insn_mode == VOIDmode)
2898 return NULL_RTX;
2900 /* Make a place to hold the source address. We will not expand
2901 the actual source until we are sure that the expansion will
2902 not fail -- there are trees that cannot be expanded twice. */
2903 src_reg = gen_reg_rtx (Pmode);
2905 /* Mark the beginning of the strlen sequence so we can emit the
2906 source operand later. */
2907 before_strlen = get_last_insn ();
2909 create_output_operand (&ops[0], target, insn_mode);
2910 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2911 create_integer_operand (&ops[2], 0);
2912 create_integer_operand (&ops[3], align);
2913 if (!maybe_expand_insn (icode, 4, ops))
2914 return NULL_RTX;
2916 /* Check to see if the argument was declared attribute nonstring
2917 and if so, issue a warning since at this point it's not known
2918 to be nul-terminated. */
2919 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2921 /* Now that we are assured of success, expand the source. */
2922 start_sequence ();
2923 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2924 if (pat != src_reg)
2926 #ifdef POINTERS_EXTEND_UNSIGNED
2927 if (GET_MODE (pat) != Pmode)
2928 pat = convert_to_mode (Pmode, pat,
2929 POINTERS_EXTEND_UNSIGNED);
2930 #endif
2931 emit_move_insn (src_reg, pat);
2933 pat = get_insns ();
2934 end_sequence ();
2936 if (before_strlen)
2937 emit_insn_after (pat, before_strlen);
2938 else
2939 emit_insn_before (pat, get_insns ());
2941 /* Return the value in the proper mode for this function. */
2942 if (GET_MODE (ops[0].value) == target_mode)
2943 target = ops[0].value;
2944 else if (target != 0)
2945 convert_move (target, ops[0].value, 0);
2946 else
2947 target = convert_to_mode (target_mode, ops[0].value, 0);
2949 return target;
2952 /* Expand call EXP to the strnlen built-in, returning the result
2953 in TARGET if convenient, or NULL_RTX on failure. */
2955 static rtx
2956 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2958 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2961 tree src = CALL_EXPR_ARG (exp, 0);
2962 tree bound = CALL_EXPR_ARG (exp, 1);
2964 if (!bound)
2965 return NULL_RTX;
2967 location_t loc = UNKNOWN_LOCATION;
2968 if (EXPR_HAS_LOCATION (exp))
2969 loc = EXPR_LOCATION (exp);
2971 tree maxobjsize = max_object_size ();
2972 tree func = get_callee_fndecl (exp);
2974 tree len = c_strlen (src, 0);
2976 if (TREE_CODE (bound) == INTEGER_CST)
2978 if (!TREE_NO_WARNING (exp)
2979 && tree_int_cst_lt (maxobjsize, bound)
2980 && warning_at (loc, OPT_Wstringop_overflow_,
2981 "%K%qD specified bound %E "
2982 "exceeds maximum object size %E",
2983 exp, func, bound, maxobjsize))
2984 TREE_NO_WARNING (exp) = true;
2986 if (!len || TREE_CODE (len) != INTEGER_CST)
2987 return NULL_RTX;
2989 len = fold_convert_loc (loc, size_type_node, len);
2990 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2991 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2994 if (TREE_CODE (bound) != SSA_NAME)
2995 return NULL_RTX;
2997 wide_int min, max;
2998 enum value_range_type rng = get_range_info (bound, &min, &max);
2999 if (rng != VR_RANGE)
3000 return NULL_RTX;
3002 if (!TREE_NO_WARNING (exp)
3003 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3004 && warning_at (loc, OPT_Wstringop_overflow_,
3005 "%K%qD specified bound [%wu, %wu] "
3006 "exceeds maximum object size %E",
3007 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3008 TREE_NO_WARNING (exp) = true;
3010 if (!len || TREE_CODE (len) != INTEGER_CST)
3011 return NULL_RTX;
3013 if (wi::gtu_p (min, wi::to_wide (len)))
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
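/* Sketch (an addition, not original source): when both the string and
   the bound are known, e.g.

     size_t n = __builtin_strnlen ("hello", 3);

   the code above folds the result to MIN (5, 3) == 3 at compile time;
   with an SSA bound whose range provably exceeds the string length,
   the strlen value itself is returned instead.  */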
3020 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3021 bytes from constant string DATA + OFFSET and return it as target
3022 constant. */
3024 static rtx
3025 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3026 scalar_int_mode mode)
3028 const char *str = (const char *) data;
3030 gcc_assert (offset >= 0
3031 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3032 <= strlen (str) + 1));
3034 return c_readstr (str + offset, mode);
3037 /* LEN specifies the length of the block for a memcpy/memset operation.
3038 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3039 In some cases we can make a very likely guess about the maximum size,
3040 which we then store in PROBABLE_MAX_SIZE. */
3042 static void
3043 determine_block_size (tree len, rtx len_rtx,
3044 unsigned HOST_WIDE_INT *min_size,
3045 unsigned HOST_WIDE_INT *max_size,
3046 unsigned HOST_WIDE_INT *probable_max_size)
3048 if (CONST_INT_P (len_rtx))
3050 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3051 return;
3053 else
3055 wide_int min, max;
3056 enum value_range_type range_type = VR_UNDEFINED;
3058 /* Determine bounds from the type. */
3059 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3060 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3061 else
3062 *min_size = 0;
3063 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3064 *probable_max_size = *max_size
3065 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3066 else
3067 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3069 if (TREE_CODE (len) == SSA_NAME)
3070 range_type = get_range_info (len, &min, &max);
3071 if (range_type == VR_RANGE)
3073 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3074 *min_size = min.to_uhwi ();
3075 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3076 *probable_max_size = *max_size = max.to_uhwi ();
3078 else if (range_type == VR_ANTI_RANGE)
3080 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3081 if (min == 0)
3083 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3084 *min_size = max.to_uhwi () + 1;
3086 /* Code like
3088 int n;
3089 if (n < 100)
3090 memcpy (a, b, n)
3092 produces an anti-range allowing negative values of N. We can
3093 still use the information and guess that N is not negative. */
3095 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3096 *probable_max_size = min.to_uhwi () - 1;
3099 gcc_checking_assert (*max_size <=
3100 (unsigned HOST_WIDE_INT)
3101 GET_MODE_MASK (GET_MODE (len_rtx)));
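/* Worked example (illustrative, not original source): for code shaped
   like the fragment in the comment above,

     int n;
     if (n < 100)
       memcpy (a, b, n);

   VRP records an anti-range for the int-to-size_t conversion, and the
   logic above turns that into a PROBABLE_MAX_SIZE of 99 while leaving
   MAX_SIZE at the full mode mask, so the block-move expander can pick
   a strategy tuned for small copies without losing correctness.  */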
3104 /* Try to verify that the sizes and lengths of the arguments to a string
3105 manipulation function given by EXP are within valid bounds and that
3106 the operation does not lead to buffer overflow or read past the end.
3107 Arguments other than EXP may be null. When non-null, the arguments
3108 have the following meaning:
3109 DST is the destination of a copy call or NULL otherwise.
3110 SRC is the source of a copy call or NULL otherwise.
3111 DSTWRITE is the number of bytes written into the destination obtained
3112 from the user-supplied size argument to the function (such as in
3113 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3114 MAXREAD is the user-supplied bound on the length of the source sequence
3115 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3116 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3117 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3118 expression EXP is a string function call (as opposed to a memory call
3119 like memcpy). As an exception, SRCSTR can also be an integer denoting
3120 the precomputed size of the source string or object (for functions like
3121 memcpy).
3122 DSTSIZE is the size of the destination object specified by the last
3123 argument to the _chk builtins, typically resulting from the expansion
3124 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3125 DSTSIZE)).
3127 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3128 SIZE_MAX.
3130 If the call is successfully verified as safe return true, otherwise
3131 return false. */
3133 static bool
3134 check_access (tree exp, tree, tree, tree dstwrite,
3135 tree maxread, tree srcstr, tree dstsize)
3137 int opt = OPT_Wstringop_overflow_;
3139 /* The size of the largest object is half the address space, or
3140 PTRDIFF_MAX. (This is way too permissive.) */
3141 tree maxobjsize = max_object_size ();
3143 /* Either the length of the source string for string functions or
3144 the size of the source object for raw memory functions. */
3145 tree slen = NULL_TREE;
3147 tree range[2] = { NULL_TREE, NULL_TREE };
3149 /* Set to true when the exact number of bytes written by a string
3150 function like strcpy is not known and the only thing that is
3151 known is that it must be at least one (for the terminating nul). */
3152 bool at_least_one = false;
3153 if (srcstr)
3155 /* SRCSTR is normally a pointer to string but as a special case
3156 it can be an integer denoting the length of a string. */
3157 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3159 /* Try to determine the range of lengths the source string
3160 refers to. If it can be determined and is less than
3161 the upper bound given by MAXREAD add one to it for
3162 the terminating nul. Otherwise, set it to one for
3163 the same reason, or to MAXREAD as appropriate. */
3164 get_range_strlen (srcstr, range);
3165 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3167 if (maxread && tree_int_cst_le (maxread, range[0]))
3168 range[0] = range[1] = maxread;
3169 else
3170 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3171 range[0], size_one_node);
3173 if (maxread && tree_int_cst_le (maxread, range[1]))
3174 range[1] = maxread;
3175 else if (!integer_all_onesp (range[1]))
3176 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3177 range[1], size_one_node);
3179 slen = range[0];
3181 else
3183 at_least_one = true;
3184 slen = size_one_node;
3187 else
3188 slen = srcstr;
3191 if (!dstwrite && !maxread)
3193 /* When the only available piece of data is the object size
3194 there is nothing to do. */
3195 if (!slen)
3196 return true;
3198 /* Otherwise, when the length of the source sequence is known
3199 (as with strlen), set DSTWRITE to it. */
3200 if (!range[0])
3201 dstwrite = slen;
3204 if (!dstsize)
3205 dstsize = maxobjsize;
3207 if (dstwrite)
3208 get_size_range (dstwrite, range);
3210 tree func = get_callee_fndecl (exp);
3212 /* First check the number of bytes to be written against the maximum
3213 object size. */
3214 if (range[0]
3215 && TREE_CODE (range[0]) == INTEGER_CST
3216 && tree_int_cst_lt (maxobjsize, range[0]))
3218 if (TREE_NO_WARNING (exp))
3219 return false;
3221 location_t loc = tree_nonartificial_location (exp);
3222 loc = expansion_point_location_if_in_system_header (loc);
3224 bool warned;
3225 if (range[0] == range[1])
3226 warned = warning_at (loc, opt,
3227 "%K%qD specified size %E "
3228 "exceeds maximum object size %E",
3229 exp, func, range[0], maxobjsize);
3230 else
3231 warned = warning_at (loc, opt,
3232 "%K%qD specified size between %E and %E "
3233 "exceeds maximum object size %E",
3234 exp, func,
3235 range[0], range[1], maxobjsize);
3236 if (warned)
3237 TREE_NO_WARNING (exp) = true;
3239 return false;
3242 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3243 constant, and in range of unsigned HOST_WIDE_INT. */
3244 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3246 /* Next check the number of bytes to be written against the destination
3247 object size. */
3248 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3250 if (range[0]
3251 && TREE_CODE (range[0]) == INTEGER_CST
3252 && ((tree_fits_uhwi_p (dstsize)
3253 && tree_int_cst_lt (dstsize, range[0]))
3254 || (dstwrite
3255 && tree_fits_uhwi_p (dstwrite)
3256 && tree_int_cst_lt (dstwrite, range[0]))))
3258 if (TREE_NO_WARNING (exp))
3259 return false;
3261 location_t loc = tree_nonartificial_location (exp);
3262 loc = expansion_point_location_if_in_system_header (loc);
3264 if (dstwrite == slen && at_least_one)
3266 /* This is a call to strcpy with a destination of 0 size
3267 and a source of unknown length. The call will write
3268 at least one byte past the end of the destination. */
3269 warning_at (loc, opt,
3270 "%K%qD writing %E or more bytes into a region "
3271 "of size %E overflows the destination",
3272 exp, func, range[0], dstsize);
3274 else if (tree_int_cst_equal (range[0], range[1]))
3275 warning_n (loc, opt, tree_to_uhwi (range[0]),
3276 "%K%qD writing %E byte into a region "
3277 "of size %E overflows the destination",
3278 "%K%qD writing %E bytes into a region "
3279 "of size %E overflows the destination",
3280 exp, func, range[0], dstsize);
3281 else if (tree_int_cst_sign_bit (range[1]))
3283 /* Avoid printing the upper bound if it's invalid. */
3284 warning_at (loc, opt,
3285 "%K%qD writing %E or more bytes into a region "
3286 "of size %E overflows the destination",
3287 exp, func, range[0], dstsize);
3289 else
3290 warning_at (loc, opt,
3291 "%K%qD writing between %E and %E bytes into "
3292 "a region of size %E overflows the destination",
3293 exp, func, range[0], range[1],
3294 dstsize);
3296 /* Return error when an overflow has been detected. */
3297 return false;
3301 /* Check the maximum length of the source sequence against the size
3302 of the destination object if known, or against the maximum size
3303 of an object. */
3304 if (maxread)
3306 get_size_range (maxread, range);
3308 /* Use the lower end for MAXREAD from now on. */
3309 if (range[0])
3310 maxread = range[0];
3312 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3314 location_t loc = tree_nonartificial_location (exp);
3315 loc = expansion_point_location_if_in_system_header (loc);
3317 if (tree_int_cst_lt (maxobjsize, range[0]))
3319 if (TREE_NO_WARNING (exp))
3320 return false;
3322 /* Warn about crazy big sizes first since that's more
3323 likely to be meaningful than saying that the bound
3324 is greater than the object size if both are big. */
3325 if (range[0] == range[1])
3326 warning_at (loc, opt,
3327 "%K%qD specified bound %E "
3328 "exceeds maximum object size %E",
3329 exp, func,
3330 range[0], maxobjsize);
3331 else
3332 warning_at (loc, opt,
3333 "%K%qD specified bound between %E and %E "
3334 "exceeds maximum object size %E",
3335 exp, func,
3336 range[0], range[1], maxobjsize);
3338 return false;
3341 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3343 if (TREE_NO_WARNING (exp))
3344 return false;
3346 if (tree_int_cst_equal (range[0], range[1]))
3347 warning_at (loc, opt,
3348 "%K%qD specified bound %E "
3349 "exceeds destination size %E",
3350 exp, func,
3351 range[0], dstsize);
3352 else
3353 warning_at (loc, opt,
3354 "%K%qD specified bound between %E and %E "
3355 "exceeds destination size %E",
3356 exp, func,
3357 range[0], range[1], dstsize);
3358 return false;
3363 /* Check for reading past the end of SRC. */
3364 if (slen
3365 && slen == srcstr
3366 && dstwrite && range[0]
3367 && tree_int_cst_lt (slen, range[0]))
3369 if (TREE_NO_WARNING (exp))
3370 return false;
3372 location_t loc = tree_nonartificial_location (exp);
3374 if (tree_int_cst_equal (range[0], range[1]))
3375 warning_n (loc, opt, tree_to_uhwi (range[0]),
3376 "%K%qD reading %E byte from a region of size %E",
3377 "%K%qD reading %E bytes from a region of size %E",
3378 exp, func, range[0], slen);
3379 else if (tree_int_cst_sign_bit (range[1]))
3381 /* Avoid printing the upper bound if it's invalid. */
3382 warning_at (loc, opt,
3383 "%K%qD reading %E or more bytes from a region "
3384 "of size %E",
3385 exp, func, range[0], slen);
3387 else
3388 warning_at (loc, opt,
3389 "%K%qD reading between %E and %E bytes from a region "
3390 "of size %E",
3391 exp, func, range[0], range[1], slen);
3392 return false;
3395 return true;
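/* Example diagnostic (illustrative, not original source): given

     char d[4];
     strcpy (d, "abcdef");

   the source length range is [6, 6], so RANGE becomes [7, 7] after
   adding the terminating nul, DSTSIZE is 4, and the destination check
   above issues -Wstringop-overflow ("writing 7 bytes into a region of
   size 4 overflows the destination") and returns false so the caller
   can suppress further folding.  */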
3398 /* Helper to compute the size of the object referenced by the DEST
3399 expression which must have pointer type, using Object Size type
3400 OSTYPE (only the least significant 2 bits are used). Return
3401 an estimate of the size of the object if successful or NULL when
3402 the size cannot be determined. When the referenced object involves
3403 a non-constant offset in some range the returned value represents
3404 the largest size given the smallest non-negative offset in the
3405 range. The function is intended for diagnostics and should not
3406 be used to influence code generation or optimization. */
3408 tree
3409 compute_objsize (tree dest, int ostype)
3411 unsigned HOST_WIDE_INT size;
3413 /* Only the two least significant bits are meaningful. */
3414 ostype &= 3;
3416 if (compute_builtin_object_size (dest, ostype, &size))
3417 return build_int_cst (sizetype, size);
3419 if (TREE_CODE (dest) == SSA_NAME)
3421 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3422 if (!is_gimple_assign (stmt))
3423 return NULL_TREE;
3425 dest = gimple_assign_rhs1 (stmt);
3427 tree_code code = gimple_assign_rhs_code (stmt);
3428 if (code == POINTER_PLUS_EXPR)
3430 /* compute_builtin_object_size fails for addresses with
3431 non-constant offsets. Try to determine the range of
3432 such an offset here and use it to adjust the constant
3433 size. */
3434 tree off = gimple_assign_rhs2 (stmt);
3435 if (TREE_CODE (off) == INTEGER_CST)
3437 if (tree size = compute_objsize (dest, ostype))
3439 wide_int wioff = wi::to_wide (off);
3440 wide_int wisiz = wi::to_wide (size);
3442 /* Ignore negative offsets for now. For others,
3443 use the lower bound as the most optimistic
3444 estimate of the (remaining) size. */
3445 if (wi::sign_mask (wioff))
3447 else if (wi::ltu_p (wioff, wisiz))
3448 return wide_int_to_tree (TREE_TYPE (size),
3449 wi::sub (wisiz, wioff));
3450 else
3451 return size_zero_node;
3454 else if (TREE_CODE (off) == SSA_NAME
3455 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3457 wide_int min, max;
3458 enum value_range_type rng = get_range_info (off, &min, &max);
3460 if (rng == VR_RANGE)
3462 if (tree size = compute_objsize (dest, ostype))
3464 wide_int wisiz = wi::to_wide (size);
3466 /* Ignore negative offsets for now. For others,
3467 use the lower bound as the most optimistic
3468 estimate of the (remaining) size. */
3469 if (wi::sign_mask (min))
3471 else if (wi::ltu_p (min, wisiz))
3472 return wide_int_to_tree (TREE_TYPE (size),
3473 wi::sub (wisiz, min));
3474 else
3475 return size_zero_node;
3480 else if (code != ADDR_EXPR)
3481 return NULL_TREE;
3484 /* Unless computing the largest size (for memcpy and other raw memory
3485 functions), try to determine the size of the object from its type. */
3486 if (!ostype)
3487 return NULL_TREE;
3489 if (TREE_CODE (dest) != ADDR_EXPR)
3490 return NULL_TREE;
3492 tree type = TREE_TYPE (dest);
3493 if (TREE_CODE (type) == POINTER_TYPE)
3494 type = TREE_TYPE (type);
3496 type = TYPE_MAIN_VARIANT (type);
3498 if (TREE_CODE (type) == ARRAY_TYPE
3499 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3501 /* Return the constant size unless it's zero (that's a zero-length
3502 array likely at the end of a struct). */
3503 tree size = TYPE_SIZE_UNIT (type);
3504 if (size && TREE_CODE (size) == INTEGER_CST
3505 && !integer_zerop (size))
3506 return size;
3509 return NULL_TREE;
3512 /* Helper to determine and check the sizes of the source and the destination
3513 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3514 call expression, DEST is the destination argument, SRC is the source
3515 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3516 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3517 (no overflow or invalid sizes), false otherwise. */
3519 static bool
3520 check_memop_access (tree exp, tree dest, tree src, tree size)
3522 /* For functions like memset and memcpy that operate on raw memory
3523 try to determine the size of the largest source and destination
3524 object using type-0 Object Size regardless of the object size
3525 type specified by the option. */
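/* For instance, given a hypothetical
     struct S { char a[4]; char b[4]; } s;
     memcpy (s.a, q, 6);
   type-0 Object Size treats the destination as the whole of S
   (8 bytes), so raw-memory functions that may legitimately write
   across member boundaries are not diagnosed.  */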
3526 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3527 tree dstsize = compute_objsize (dest, 0);
3529 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3530 srcsize, dstsize);
3533 /* Validate memchr arguments without performing any expansion.
3534 Return NULL_RTX. */
3536 static rtx
3537 expand_builtin_memchr (tree exp, rtx)
3539 if (!validate_arglist (exp,
3540 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3541 return NULL_RTX;
3543 tree arg1 = CALL_EXPR_ARG (exp, 0);
3544 tree len = CALL_EXPR_ARG (exp, 2);
3546 /* Diagnose calls where the specified length exceeds the size
3547 of the object. */
3548 if (warn_stringop_overflow)
3550 tree size = compute_objsize (arg1, 0);
3551 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3552 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3555 return NULL_RTX;
3558 /* Expand a call EXP to the memcpy builtin.
3559 Return NULL_RTX if we failed, the caller should emit a normal call,
3560 otherwise try to get the result in TARGET, if convenient (and in
3561 mode MODE if that's convenient). */
3563 static rtx
3564 expand_builtin_memcpy (tree exp, rtx target)
3566 if (!validate_arglist (exp,
3567 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3568 return NULL_RTX;
3570 tree dest = CALL_EXPR_ARG (exp, 0);
3571 tree src = CALL_EXPR_ARG (exp, 1);
3572 tree len = CALL_EXPR_ARG (exp, 2);
3574 check_memop_access (exp, dest, src, len);
3576 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3577 /*endp=*/ 0);
3580 /* Check a call EXP to the memmove built-in for validity.
3581 Return NULL_RTX on both success and failure. */
3583 static rtx
3584 expand_builtin_memmove (tree exp, rtx)
3586 if (!validate_arglist (exp,
3587 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3588 return NULL_RTX;
3590 tree dest = CALL_EXPR_ARG (exp, 0);
3591 tree src = CALL_EXPR_ARG (exp, 1);
3592 tree len = CALL_EXPR_ARG (exp, 2);
3594 check_memop_access (exp, dest, src, len);
3596 return NULL_RTX;
3599 /* Expand a call EXP to the mempcpy builtin.
3600 Return NULL_RTX if we failed; the caller should emit a normal call,
3601 otherwise try to get the result in TARGET, if convenient (and in
3602 mode MODE if that's convenient). If ENDP is 0 return the
3603 destination pointer, if ENDP is 1 return the end pointer ala
3604 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3605 stpcpy. */
3607 static rtx
3608 expand_builtin_mempcpy (tree exp, rtx target)
3610 if (!validate_arglist (exp,
3611 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3612 return NULL_RTX;
3614 tree dest = CALL_EXPR_ARG (exp, 0);
3615 tree src = CALL_EXPR_ARG (exp, 1);
3616 tree len = CALL_EXPR_ARG (exp, 2);
3618 /* Policy does not generally allow using compute_objsize (which
3619 is used internally by check_memop_access) to change code generation
3620 or drive optimization decisions.
3622 In this instance it is safe because the code we generate has
3623 the same semantics regardless of the return value of
3624 check_memop_access. Exactly the same amount of data is copied
3625 and the return value is exactly the same in both cases.
3627 Furthermore, check_memop_access always uses Object Size type 0
3628 for the call to compute_objsize, so the imprecise nature of
3629 compute_objsize is avoided. */
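/* As a reminder of the semantics: for a well-formed call
     void *r = mempcpy (d, s, n);
   R equals (char *) memcpy (d, s, n) + n, which is why the
   expansion below can share expand_builtin_memory_copy_args with
   memcpy, with ENDP selecting the return value.  */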
3631 /* Avoid expanding mempcpy into memcpy when the call is determined
3632 to overflow the buffer. This also prevents the same overflow
3633 from being diagnosed again when expanding memcpy. */
3634 if (!check_memop_access (exp, dest, src, len))
3635 return NULL_RTX;
3637 return expand_builtin_mempcpy_args (dest, src, len,
3638 target, exp, /*endp=*/ 1);
3641 /* Helper function to do the actual work for expanding the memory copy
3642 family of functions (memcpy, mempcpy, stpcpy). Expansion should assign
3643 LEN bytes of memory from SRC to DEST and assign to TARGET if convenient.
3644 If ENDP is 0 return the
3645 destination pointer, if ENDP is 1 return the end pointer ala
3646 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3647 stpcpy. */
3649 static rtx
3650 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3651 rtx target, tree exp, int endp)
3653 const char *src_str;
3654 unsigned int src_align = get_pointer_alignment (src);
3655 unsigned int dest_align = get_pointer_alignment (dest);
3656 rtx dest_mem, src_mem, dest_addr, len_rtx;
3657 HOST_WIDE_INT expected_size = -1;
3658 unsigned int expected_align = 0;
3659 unsigned HOST_WIDE_INT min_size;
3660 unsigned HOST_WIDE_INT max_size;
3661 unsigned HOST_WIDE_INT probable_max_size;
3663 /* If DEST is not a pointer type, call the normal function. */
3664 if (dest_align == 0)
3665 return NULL_RTX;
3667 /* If SRC is not a pointer type, don't do this
3668 operation in-line. */
3669 if (src_align == 0)
3670 return NULL_RTX;
3672 if (currently_expanding_gimple_stmt)
3673 stringop_block_profile (currently_expanding_gimple_stmt,
3674 &expected_align, &expected_size);
3676 if (expected_align < dest_align)
3677 expected_align = dest_align;
3678 dest_mem = get_memory_rtx (dest, len);
3679 set_mem_align (dest_mem, dest_align);
3680 len_rtx = expand_normal (len);
3681 determine_block_size (len, len_rtx, &min_size, &max_size,
3682 &probable_max_size);
3683 src_str = c_getstr (src);
3685 /* If SRC is a string constant and block move would be done
3686 by pieces, we can avoid loading the string from memory
3687 and only store the computed constants. */
3688 if (src_str
3689 && CONST_INT_P (len_rtx)
3690 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3691 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3692 CONST_CAST (char *, src_str),
3693 dest_align, false))
3695 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3696 builtin_memcpy_read_str,
3697 CONST_CAST (char *, src_str),
3698 dest_align, false, endp);
3699 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3700 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3701 return dest_mem;
3704 src_mem = get_memory_rtx (src, len);
3705 set_mem_align (src_mem, src_align);
3707 /* Copy word part most expediently. */
3708 enum block_op_methods method = BLOCK_OP_NORMAL;
3709 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3710 method = BLOCK_OP_TAILCALL;
3711 if (endp == 1 && target != const0_rtx)
3712 method = BLOCK_OP_NO_LIBCALL_RET;
3713 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3714 expected_align, expected_size,
3715 min_size, max_size, probable_max_size);
3716 if (dest_addr == pc_rtx)
3717 return NULL_RTX;
3719 if (dest_addr == 0)
3721 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3722 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3725 if (endp && target != const0_rtx)
3727 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3728 /* stpcpy pointer to last byte. */
3729 if (endp == 2)
3730 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3733 return dest_addr;
3736 static rtx
3737 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3738 rtx target, tree orig_exp, int endp)
3740 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3741 endp);
3744 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3745 we failed; the caller should emit a normal call, otherwise try to
3746 get the result in TARGET, if convenient. If ENDP is 0 return the
3747 destination pointer, if ENDP is 1 return the end pointer ala
3748 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3749 stpcpy. */
3751 static rtx
3752 expand_movstr (tree dest, tree src, rtx target, int endp)
3754 struct expand_operand ops[3];
3755 rtx dest_mem;
3756 rtx src_mem;
3758 if (!targetm.have_movstr ())
3759 return NULL_RTX;
3761 dest_mem = get_memory_rtx (dest, NULL);
3762 src_mem = get_memory_rtx (src, NULL);
3763 if (!endp)
3765 target = force_reg (Pmode, XEXP (dest_mem, 0));
3766 dest_mem = replace_equiv_address (dest_mem, target);
3769 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3770 create_fixed_operand (&ops[1], dest_mem);
3771 create_fixed_operand (&ops[2], src_mem);
3772 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3773 return NULL_RTX;
3775 if (endp && target != const0_rtx)
3777 target = ops[0].value;
3778 /* movstr is supposed to set end to the address of the NUL
3779 terminator. If the caller requested a mempcpy-like return value,
3780 adjust it. */
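/* E.g., after copying "abc" the movstr pattern leaves TARGET at
   &dst[3], the address of the NUL; the mempcpy-style value the
   caller asked for (ENDP == 1) is &dst[4], hence the adjustment
   by one below.  */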
3781 if (endp == 1)
3783 rtx tem = plus_constant (GET_MODE (target),
3784 gen_lowpart (GET_MODE (target), target), 1);
3785 emit_move_insn (target, force_operand (tem, NULL_RTX));
3788 return target;
3791 /* Do some very basic size validation of a call to the strcat builtin
3792 given by EXP. Return NULL_RTX to have the built-in expand to a call
3793 to the library function. */
3795 static rtx
3796 expand_builtin_strcat (tree exp, rtx)
3798 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3799 || !warn_stringop_overflow)
3800 return NULL_RTX;
3802 tree dest = CALL_EXPR_ARG (exp, 0);
3803 tree src = CALL_EXPR_ARG (exp, 1);
3805 /* There is no way here to determine the length of the string in
3806 the destination to which the SRC string is being appended, so
3807 just diagnose cases when the source string is longer than
3808 the destination object. */
3810 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3812 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3813 destsize);
3815 return NULL_RTX;
3818 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3819 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3820 try to get the result in TARGET, if convenient (and in mode MODE if that's
3821 convenient). */
3823 static rtx
3824 expand_builtin_strcpy (tree exp, rtx target)
3826 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3827 return NULL_RTX;
3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree src = CALL_EXPR_ARG (exp, 1);
3832 if (warn_stringop_overflow)
3834 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3835 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3836 src, destsize);
3839 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3841 /* Check to see if the argument was declared attribute nonstring
3842 and if so, issue a warning since at this point it's not known
3843 to be nul-terminated. */
3844 tree fndecl = get_callee_fndecl (exp);
3845 maybe_warn_nonstring_arg (fndecl, exp);
3846 return ret;
3849 return NULL_RTX;
3852 /* Helper function to do the actual work for expand_builtin_strcpy. The
3853 arguments to the builtin_strcpy call DEST and SRC are broken out
3854 so that this can also be called without constructing an actual CALL_EXPR.
3855 The other arguments and return value are the same as for
3856 expand_builtin_strcpy. */
3858 static rtx
3859 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3861 return expand_movstr (dest, src, target, /*endp=*/0);
3864 /* Expand a call EXP to the stpcpy builtin.
3865 Return NULL_RTX if we failed; the caller should emit a normal call,
3866 otherwise try to get the result in TARGET, if convenient (and in
3867 mode MODE if that's convenient). */
3869 static rtx
3870 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3872 tree dst, src;
3873 location_t loc = EXPR_LOCATION (exp);
3875 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3876 return NULL_RTX;
3878 dst = CALL_EXPR_ARG (exp, 0);
3879 src = CALL_EXPR_ARG (exp, 1);
3881 if (warn_stringop_overflow)
3883 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3884 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3885 src, destsize);
3888 /* If return value is ignored, transform stpcpy into strcpy. */
3889 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3891 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3892 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3893 return expand_expr (result, target, mode, EXPAND_NORMAL);
3895 else
3897 tree len, lenp1;
3898 rtx ret;
3900 /* Ensure we get an actual string whose length can be evaluated at
3901 compile-time, not an expression containing a string. This is
3902 because the latter will potentially produce pessimized code
3903 when used to produce the return value. */
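/* For example, a hypothetical
     char *e = stpcpy (d, "abc");
   becomes mempcpy (d, "abc", 4) with ENDP == 2, so E points at
   the copied NUL terminator (d + 3), matching stpcpy semantics.  */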
3904 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3905 return expand_movstr (dst, src, target, /*endp=*/2);
3907 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3908 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3909 target, exp, /*endp=*/2);
3911 if (ret)
3912 return ret;
3914 if (TREE_CODE (len) == INTEGER_CST)
3916 rtx len_rtx = expand_normal (len);
3918 if (CONST_INT_P (len_rtx))
3920 ret = expand_builtin_strcpy_args (dst, src, target);
3922 if (ret)
3924 if (! target)
3926 if (mode != VOIDmode)
3927 target = gen_reg_rtx (mode);
3928 else
3929 target = gen_reg_rtx (GET_MODE (ret));
3931 if (GET_MODE (target) != GET_MODE (ret))
3932 ret = gen_lowpart (GET_MODE (target), ret);
3934 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3935 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3936 gcc_assert (ret);
3938 return target;
3943 return expand_movstr (dst, src, target, /*endp=*/2);
3947 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3948 arguments while being careful to avoid duplicate warnings (which could
3949 be issued if the expander were to expand the call, resulting in it
3950 being emitted in expand_call ()). */
3952 static rtx
3953 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3955 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3957 /* The call has been successfully expanded. Check for nonstring
3958 arguments and issue warnings as appropriate. */
3959 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3960 return ret;
3963 return NULL_RTX;
3966 /* Check a call EXP to the stpncpy built-in for validity.
3967 Return NULL_RTX on both success and failure. */
3969 static rtx
3970 expand_builtin_stpncpy (tree exp, rtx)
3972 if (!validate_arglist (exp,
3973 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3974 || !warn_stringop_overflow)
3975 return NULL_RTX;
3977 /* The source and destination of the call. */
3978 tree dest = CALL_EXPR_ARG (exp, 0);
3979 tree src = CALL_EXPR_ARG (exp, 1);
3981 /* The exact number of bytes to write (not the maximum). */
3982 tree len = CALL_EXPR_ARG (exp, 2);
3984 /* The size of the destination object. */
3985 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3987 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3989 return NULL_RTX;
3992 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3993 bytes from constant string DATA + OFFSET and return it as target
3994 constant. */
3996 static rtx
3997 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3998 scalar_int_mode mode)
4000 const char *str = (const char *) data;
4002 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4003 return const0_rtx;
4005 return c_readstr (str + offset, mode);
4008 /* Helper to check the sizes of sequences and the destination of calls
4009 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4010 success (no overflow or invalid sizes), false otherwise. */
4012 static bool
4013 check_strncat_sizes (tree exp, tree objsize)
4015 tree dest = CALL_EXPR_ARG (exp, 0);
4016 tree src = CALL_EXPR_ARG (exp, 1);
4017 tree maxread = CALL_EXPR_ARG (exp, 2);
4019 /* Try to determine the range of lengths that the source expression
4020 refers to. */
4021 tree lenrange[2];
4022 get_range_strlen (src, lenrange);
4024 /* Try to verify that the destination is big enough for the shortest
4025 string. */
4027 if (!objsize && warn_stringop_overflow)
4029 /* If it hasn't been provided by __strncat_chk, try to determine
4030 the size of the destination object into which the source is
4031 being copied. */
4032 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4035 /* Add one for the terminating nul. */
4036 tree srclen = (lenrange[0]
4037 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4038 size_one_node)
4039 : NULL_TREE);
4041 /* The strncat function copies at most MAXREAD bytes and always appends
4042 the terminating nul so the specified upper bound should never be equal
4043 to (or greater than) the size of the destination. */
4044 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4045 && tree_int_cst_equal (objsize, maxread))
4047 location_t loc = tree_nonartificial_location (exp);
4048 loc = expansion_point_location_if_in_system_header (loc);
4050 warning_at (loc, OPT_Wstringop_overflow_,
4051 "%K%qD specified bound %E equals destination size",
4052 exp, get_callee_fndecl (exp), maxread);
4054 return false;
4057 if (!srclen
4058 || (maxread && tree_fits_uhwi_p (maxread)
4059 && tree_fits_uhwi_p (srclen)
4060 && tree_int_cst_lt (maxread, srclen)))
4061 srclen = maxread;
4063 /* The number of bytes to write is LEN but check_access will also
4064 check SRCLEN if LEN's value isn't known. */
4065 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4066 objsize);
4069 /* Similar to expand_builtin_strcat, do some very basic size validation
4070 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4071 the built-in expand to a call to the library function. */
4073 static rtx
4074 expand_builtin_strncat (tree exp, rtx)
4076 if (!validate_arglist (exp,
4077 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4078 || !warn_stringop_overflow)
4079 return NULL_RTX;
4081 tree dest = CALL_EXPR_ARG (exp, 0);
4082 tree src = CALL_EXPR_ARG (exp, 1);
4083 /* The upper bound on the number of bytes to write. */
4084 tree maxread = CALL_EXPR_ARG (exp, 2);
4085 /* The length of the source sequence. */
4086 tree slen = c_strlen (src, 1);
4088 /* Try to determine the range of lengths that the source expression
4089 refers to. */
4090 tree lenrange[2];
4091 if (slen)
4092 lenrange[0] = lenrange[1] = slen;
4093 else
4094 get_range_strlen (src, lenrange);
4096 /* Try to verify that the destination is big enough for the shortest
4097 string. First try to determine the size of the destination object
4098 into which the source is being copied. */
4099 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4101 /* Add one for the terminating nul. */
4102 tree srclen = (lenrange[0]
4103 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4104 size_one_node)
4105 : NULL_TREE);
4107 /* The strncat function copies at most MAXREAD bytes and always appends
4108 the terminating nul so the specified upper bound should never be equal
4109 to (or greater than) the size of the destination. */
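/* A hypothetical call diagnosed by the check below:
     char d[8];
     strncat (d, s, sizeof d);
   The bound equals the destination size, so the terminating NUL
   (or part of S) can end up past the end of D; a correct bound
   here would be sizeof d - strlen (d) - 1.  */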
4110 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4111 && tree_int_cst_equal (destsize, maxread))
4113 location_t loc = tree_nonartificial_location (exp);
4114 loc = expansion_point_location_if_in_system_header (loc);
4116 warning_at (loc, OPT_Wstringop_overflow_,
4117 "%K%qD specified bound %E equals destination size",
4118 exp, get_callee_fndecl (exp), maxread);
4120 return NULL_RTX;
4123 if (!srclen
4124 || (maxread && tree_fits_uhwi_p (maxread)
4125 && tree_fits_uhwi_p (srclen)
4126 && tree_int_cst_lt (maxread, srclen)))
4127 srclen = maxread;
4129 /* The number of bytes to write is SRCLEN. */
4130 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4132 return NULL_RTX;
4135 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4136 NULL_RTX if we failed; the caller should emit a normal call. */
4138 static rtx
4139 expand_builtin_strncpy (tree exp, rtx target)
4141 location_t loc = EXPR_LOCATION (exp);
4143 if (validate_arglist (exp,
4144 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4146 tree dest = CALL_EXPR_ARG (exp, 0);
4147 tree src = CALL_EXPR_ARG (exp, 1);
4148 /* The number of bytes to write (not the maximum). */
4149 tree len = CALL_EXPR_ARG (exp, 2);
4150 /* The length of the source sequence. */
4151 tree slen = c_strlen (src, 1);
4153 if (warn_stringop_overflow)
4155 tree destsize = compute_objsize (dest,
4156 warn_stringop_overflow - 1);
4158 /* The number of bytes to write is LEN but check_access will also
4159 check SLEN if LEN's value isn't known. */
4160 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4161 destsize);
4164 /* We must be passed a constant len and src parameter. */
4165 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4166 return NULL_RTX;
4168 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4170 /* We're required to pad with trailing zeros if the requested
4171 len is greater than strlen(s2)+1. In that case try to
4172 use store_by_pieces; if it fails, punt. */
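/* E.g., a hypothetical
     char d[5];
     strncpy (d, "ab", 5);
   stores 'a', 'b', '\0', '\0', '\0'; LEN (5) exceeds
   strlen ("ab") + 1, so the padded image is emitted directly by
   store_by_pieces.  */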
4173 if (tree_int_cst_lt (slen, len))
4175 unsigned int dest_align = get_pointer_alignment (dest);
4176 const char *p = c_getstr (src);
4177 rtx dest_mem;
4179 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4180 || !can_store_by_pieces (tree_to_uhwi (len),
4181 builtin_strncpy_read_str,
4182 CONST_CAST (char *, p),
4183 dest_align, false))
4184 return NULL_RTX;
4186 dest_mem = get_memory_rtx (dest, len);
4187 store_by_pieces (dest_mem, tree_to_uhwi (len),
4188 builtin_strncpy_read_str,
4189 CONST_CAST (char *, p), dest_align, false, 0);
4190 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4191 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4192 return dest_mem;
4195 return NULL_RTX;
4198 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4199 bytes from constant string DATA + OFFSET and return it as target
4200 constant. */
4202 static rtx
4203 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4204 scalar_int_mode mode)
4206 const char *c = (const char *) data;
4207 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4209 memset (p, *c, GET_MODE_SIZE (mode));
4211 return c_readstr (p, mode);
4214 /* Callback routine for store_by_pieces. Return the RTL of a register
4215 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4216 char value given in the RTL register data. For example, if mode is
4217 4 bytes wide, return the RTL for 0x01010101*data. */
4219 static rtx
4220 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4221 scalar_int_mode mode)
4223 rtx target, coeff;
4224 size_t size;
4225 char *p;
4227 size = GET_MODE_SIZE (mode);
4228 if (size == 1)
4229 return (rtx) data;
4231 p = XALLOCAVEC (char, size);
4232 memset (p, 1, size);
4233 coeff = c_readstr (p, mode);
4235 target = convert_to_mode (mode, (rtx) data, 1);
4236 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4237 return force_reg (mode, target);
4240 /* Expand expression EXP, which is a call to the memset builtin. Return
4241 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4242 try to get the result in TARGET, if convenient (and in mode MODE if that's
4243 convenient). */
4245 static rtx
4246 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4248 if (!validate_arglist (exp,
4249 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4250 return NULL_RTX;
4252 tree dest = CALL_EXPR_ARG (exp, 0);
4253 tree val = CALL_EXPR_ARG (exp, 1);
4254 tree len = CALL_EXPR_ARG (exp, 2);
4256 check_memop_access (exp, dest, NULL_TREE, len);
4258 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4261 /* Helper function to do the actual work for expand_builtin_memset. The
4262 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4263 so that this can also be called without constructing an actual CALL_EXPR.
4264 The other arguments and return value are the same as for
4265 expand_builtin_memset. */
4267 static rtx
4268 expand_builtin_memset_args (tree dest, tree val, tree len,
4269 rtx target, machine_mode mode, tree orig_exp)
4271 tree fndecl, fn;
4272 enum built_in_function fcode;
4273 machine_mode val_mode;
4274 char c;
4275 unsigned int dest_align;
4276 rtx dest_mem, dest_addr, len_rtx;
4277 HOST_WIDE_INT expected_size = -1;
4278 unsigned int expected_align = 0;
4279 unsigned HOST_WIDE_INT min_size;
4280 unsigned HOST_WIDE_INT max_size;
4281 unsigned HOST_WIDE_INT probable_max_size;
4283 dest_align = get_pointer_alignment (dest);
4285 /* If DEST is not a pointer type, don't do this operation in-line. */
4286 if (dest_align == 0)
4287 return NULL_RTX;
4289 if (currently_expanding_gimple_stmt)
4290 stringop_block_profile (currently_expanding_gimple_stmt,
4291 &expected_align, &expected_size);
4293 if (expected_align < dest_align)
4294 expected_align = dest_align;
4296 /* If the LEN parameter is zero, return DEST. */
4297 if (integer_zerop (len))
4299 /* Evaluate and ignore VAL in case it has side-effects. */
4300 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4301 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4304 /* Stabilize the arguments in case we fail. */
4305 dest = builtin_save_expr (dest);
4306 val = builtin_save_expr (val);
4307 len = builtin_save_expr (len);
4309 len_rtx = expand_normal (len);
4310 determine_block_size (len, len_rtx, &min_size, &max_size,
4311 &probable_max_size);
4312 dest_mem = get_memory_rtx (dest, len);
4313 val_mode = TYPE_MODE (unsigned_char_type_node);
4315 if (TREE_CODE (val) != INTEGER_CST)
4317 rtx val_rtx;
4319 val_rtx = expand_normal (val);
4320 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4322 /* Assume that we can memset by pieces if we can store
4323 the coefficients by pieces (in the required modes).
4324 We can't pass builtin_memset_gen_str as that emits RTL. */
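/* Sketch of the idea: for a hypothetical memset (p, c, 8) with C
   in a register, builtin_memset_gen_str widens C to the store
   mode by multiplying it with a constant such as
   0x0101010101010101, so each stored word holds copies of the
   byte value.  */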
4325 c = 1;
4326 if (tree_fits_uhwi_p (len)
4327 && can_store_by_pieces (tree_to_uhwi (len),
4328 builtin_memset_read_str, &c, dest_align,
4329 true))
4331 val_rtx = force_reg (val_mode, val_rtx);
4332 store_by_pieces (dest_mem, tree_to_uhwi (len),
4333 builtin_memset_gen_str, val_rtx, dest_align,
4334 true, 0);
4336 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4337 dest_align, expected_align,
4338 expected_size, min_size, max_size,
4339 probable_max_size))
4340 goto do_libcall;
4342 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4343 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4344 return dest_mem;
4347 if (target_char_cast (val, &c))
4348 goto do_libcall;
4350 if (c)
4352 if (tree_fits_uhwi_p (len)
4353 && can_store_by_pieces (tree_to_uhwi (len),
4354 builtin_memset_read_str, &c, dest_align,
4355 true))
4356 store_by_pieces (dest_mem, tree_to_uhwi (len),
4357 builtin_memset_read_str, &c, dest_align, true, 0);
4358 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4359 gen_int_mode (c, val_mode),
4360 dest_align, expected_align,
4361 expected_size, min_size, max_size,
4362 probable_max_size))
4363 goto do_libcall;
4365 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4366 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4367 return dest_mem;
4370 set_mem_align (dest_mem, dest_align);
4371 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4372 CALL_EXPR_TAILCALL (orig_exp)
4373 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4374 expected_align, expected_size,
4375 min_size, max_size,
4376 probable_max_size);
4378 if (dest_addr == 0)
4380 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4381 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4384 return dest_addr;
4386 do_libcall:
4387 fndecl = get_callee_fndecl (orig_exp);
4388 fcode = DECL_FUNCTION_CODE (fndecl);
4389 if (fcode == BUILT_IN_MEMSET)
4390 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4391 dest, val, len);
4392 else if (fcode == BUILT_IN_BZERO)
4393 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4394 dest, len);
4395 else
4396 gcc_unreachable ();
4397 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4398 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4399 return expand_call (fn, target, target == const0_rtx);
4402 /* Expand expression EXP, which is a call to the bzero builtin. Return
4403 NULL_RTX if we failed; the caller should emit a normal call. */
4405 static rtx
4406 expand_builtin_bzero (tree exp)
4408 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4409 return NULL_RTX;
4411 tree dest = CALL_EXPR_ARG (exp, 0);
4412 tree size = CALL_EXPR_ARG (exp, 1);
4414 check_memop_access (exp, dest, NULL_TREE, size);
4416 /* New argument list transforming bzero(ptr x, int y) to
4417 memset(ptr x, int 0, size_t y). This is done this way
4418 so that if it isn't expanded inline, we fall back to
4419 calling bzero instead of memset. */
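/* In other words, a hypothetical bzero (p, n) is expanded as if
   it were memset (p, 0, (size_t) n), while ORIG_EXP keeps the
   original bzero call so any library fallback still calls bzero.  */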
4421 location_t loc = EXPR_LOCATION (exp);
4423 return expand_builtin_memset_args (dest, integer_zero_node,
4424 fold_convert_loc (loc,
4425 size_type_node, size),
4426 const0_rtx, VOIDmode, exp);
4429 /* Try to expand cmpstr operation ICODE with the given operands.
4430 Return the result rtx on success, otherwise return null. */
4432 static rtx
4433 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4434 HOST_WIDE_INT align)
4436 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4438 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4439 target = NULL_RTX;
4441 struct expand_operand ops[4];
4442 create_output_operand (&ops[0], target, insn_mode);
4443 create_fixed_operand (&ops[1], arg1_rtx);
4444 create_fixed_operand (&ops[2], arg2_rtx);
4445 create_integer_operand (&ops[3], align);
4446 if (maybe_expand_insn (icode, 4, ops))
4447 return ops[0].value;
4448 return NULL_RTX;
4451 /* Expand expression EXP, which is a call to the memcmp built-in function.
4452 Return NULL_RTX if we failed and the caller should emit a normal call,
4453 otherwise try to get the result in TARGET, if convenient.
4454 RESULT_EQ is true if we can relax the returned value to be either zero
4455 or nonzero, without caring about the sign. */
4457 static rtx
4458 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4460 if (!validate_arglist (exp,
4461 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4462 return NULL_RTX;
4464 tree arg1 = CALL_EXPR_ARG (exp, 0);
4465 tree arg2 = CALL_EXPR_ARG (exp, 1);
4466 tree len = CALL_EXPR_ARG (exp, 2);
4467 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4468 bool no_overflow = true;
4470 /* Diagnose calls where the specified length exceeds the size of either
4471 object. */
4472 tree size = compute_objsize (arg1, 0);
4473 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4474 len, /*maxread=*/NULL_TREE, size,
4475 /*objsize=*/NULL_TREE);
4476 if (no_overflow)
4478 size = compute_objsize (arg2, 0);
4479 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4480 len, /*maxread=*/NULL_TREE, size,
4481 /*objsize=*/NULL_TREE);
4484 /* If the specified length exceeds the size of either object,
4485 call the function. */
4486 if (!no_overflow)
4487 return NULL_RTX;
4489 /* Due to the performance benefit, always inline the calls first
4490 when result_eq is false. */
4491 rtx result = NULL_RTX;
4493 if (!result_eq && fcode != BUILT_IN_BCMP)
4495 result = inline_expand_builtin_string_cmp (exp, target);
4496 if (result)
4497 return result;
4500 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4501 location_t loc = EXPR_LOCATION (exp);
4503 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4504 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4506 /* If we don't have POINTER_TYPE, call the function. */
4507 if (arg1_align == 0 || arg2_align == 0)
4508 return NULL_RTX;
4510 rtx arg1_rtx = get_memory_rtx (arg1, len);
4511 rtx arg2_rtx = get_memory_rtx (arg2, len);
4512 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4514 /* Set MEM_SIZE as appropriate. */
4515 if (CONST_INT_P (len_rtx))
4517 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4518 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4521 by_pieces_constfn constfn = NULL;
4523 const char *src_str = c_getstr (arg2);
4524 if (result_eq && src_str == NULL)
4526 src_str = c_getstr (arg1);
4527 if (src_str != NULL)
4528 std::swap (arg1_rtx, arg2_rtx);
4531 /* If SRC is a string constant and block move would be done
4532 by pieces, we can avoid loading the string from memory
4533 and only store the computed constants. */
4534 if (src_str
4535 && CONST_INT_P (len_rtx)
4536 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4537 constfn = builtin_memcpy_read_str;
4539 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4540 TREE_TYPE (len), target,
4541 result_eq, constfn,
4542 CONST_CAST (char *, src_str));
4544 if (result)
4546 /* Return the value in the proper mode for this function. */
4547 if (GET_MODE (result) == mode)
4548 return result;
4550 if (target != 0)
4552 convert_move (target, result, 0);
4553 return target;
4556 return convert_to_mode (mode, result, 0);
4559 return NULL_RTX;
4562 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4563 if we failed; the caller should emit a normal call, otherwise try to get
4564 the result in TARGET, if convenient. */
4566 static rtx
4567 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4569 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4570 return NULL_RTX;
4572 /* Due to the performance benefit, always inline the calls first. */
4573 rtx result = NULL_RTX;
4574 result = inline_expand_builtin_string_cmp (exp, target);
4575 if (result)
4576 return result;
4578 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4579 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4580 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4581 return NULL_RTX;
4583 tree arg1 = CALL_EXPR_ARG (exp, 0);
4584 tree arg2 = CALL_EXPR_ARG (exp, 1);
4586 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4587 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4589 /* If we don't have POINTER_TYPE, call the function. */
4590 if (arg1_align == 0 || arg2_align == 0)
4591 return NULL_RTX;
4593 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4594 arg1 = builtin_save_expr (arg1);
4595 arg2 = builtin_save_expr (arg2);
4597 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4598 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4600 /* Try to call cmpstrsi. */
4601 if (cmpstr_icode != CODE_FOR_nothing)
4602 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4603 MIN (arg1_align, arg2_align));
4605 /* Try to determine at least one length and call cmpstrnsi. */
4606 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4608 tree len;
4609 rtx arg3_rtx;
4611 tree len1 = c_strlen (arg1, 1);
4612 tree len2 = c_strlen (arg2, 1);
4614 if (len1)
4615 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4616 if (len2)
4617 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4619 /* If we don't have a constant length for the first, use the length
4620 of the second, if we know it. We don't require a constant for
4621 this case; some cost analysis could be done if both are available
4622 but neither is constant. For now, assume they're equally cheap,
4623 unless one has side effects. If both strings have constant lengths,
4624 use the smaller. */
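/* E.g., if ARG1 is the literal "abcd" (LEN1 == 5 once the NUL is
   counted) and LEN2 is unknown, LEN1 is chosen; comparing past
   the first NUL is unnecessary because strcmp stops there.  */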
4626 if (!len1)
4627 len = len2;
4628 else if (!len2)
4629 len = len1;
4630 else if (TREE_SIDE_EFFECTS (len1))
4631 len = len2;
4632 else if (TREE_SIDE_EFFECTS (len2))
4633 len = len1;
4634 else if (TREE_CODE (len1) != INTEGER_CST)
4635 len = len2;
4636 else if (TREE_CODE (len2) != INTEGER_CST)
4637 len = len1;
4638 else if (tree_int_cst_lt (len1, len2))
4639 len = len1;
4640 else
4641 len = len2;
4643 /* If both arguments have side effects, we cannot optimize. */
4644 if (len && !TREE_SIDE_EFFECTS (len))
4646 arg3_rtx = expand_normal (len);
4647 result = expand_cmpstrn_or_cmpmem
4648 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4649 arg3_rtx, MIN (arg1_align, arg2_align));
4653 tree fndecl = get_callee_fndecl (exp);
4654 if (result)
4656 /* Check to see if the argument was declared attribute nonstring
4657 and if so, issue a warning since at this point it's not known
4658 to be nul-terminated. */
4659 maybe_warn_nonstring_arg (fndecl, exp);
4661 /* Return the value in the proper mode for this function. */
4662 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4663 if (GET_MODE (result) == mode)
4664 return result;
4665 if (target == 0)
4666 return convert_to_mode (mode, result, 0);
4667 convert_move (target, result, 0);
4668 return target;
4671 /* Expand the library call ourselves using a stabilized argument
4672 list to avoid re-evaluating the function's arguments twice. */
4673 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4674 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4675 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4676 return expand_call (fn, target, target == const0_rtx);
4679 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4680 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4681 the result in TARGET, if convenient. */
4683 static rtx
4684 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4685 ATTRIBUTE_UNUSED machine_mode mode)
4687 if (!validate_arglist (exp,
4688 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4689 return NULL_RTX;
4691 /* Due to the performance benefit, always inline the calls first. */
4692 rtx result = NULL_RTX;
4693 result = inline_expand_builtin_string_cmp (exp, target);
4694 if (result)
4695 return result;
4697 /* If c_strlen can determine an expression for one of the string
4698 lengths, and it doesn't have side effects, then emit cmpstrnsi
4699 using length MIN(strlen(string)+1, arg3). */
4700 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4701 if (cmpstrn_icode == CODE_FOR_nothing)
4702 return NULL_RTX;
4704 tree len;
4706 tree arg1 = CALL_EXPR_ARG (exp, 0);
4707 tree arg2 = CALL_EXPR_ARG (exp, 1);
4708 tree arg3 = CALL_EXPR_ARG (exp, 2);
4710 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4711 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4713 tree len1 = c_strlen (arg1, 1);
4714 tree len2 = c_strlen (arg2, 1);
4716 location_t loc = EXPR_LOCATION (exp);
4718 if (len1)
4719 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4720 if (len2)
4721 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4723 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4725 /* If we don't have a constant length for the first, use the length
4726 of the second, if we know it. If neither string is constant length,
4727 use the given length argument. We don't require a constant for
4728 this case; some cost analysis could be done if both are available
4729 but neither is constant. For now, assume they're equally cheap,
4730 unless one has side effects. If both strings have constant lengths,
4731 use the smaller. */
4733 if (!len1 && !len2)
4734 len = len3;
4735 else if (!len1)
4736 len = len2;
4737 else if (!len2)
4738 len = len1;
4739 else if (TREE_SIDE_EFFECTS (len1))
4740 len = len2;
4741 else if (TREE_SIDE_EFFECTS (len2))
4742 len = len1;
4743 else if (TREE_CODE (len1) != INTEGER_CST)
4744 len = len2;
4745 else if (TREE_CODE (len2) != INTEGER_CST)
4746 len = len1;
4747 else if (tree_int_cst_lt (len1, len2))
4748 len = len1;
4749 else
4750 len = len2;
4752 /* If we are not using the given length, we must incorporate it here.
4753 The actual new length parameter will be MIN(len,arg3) in this case. */
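/* For instance, with a hypothetical strncmp (s, "ab", n) the
   comparison length becomes MIN (3, n): the literal's NUL bounds
   the compare, and N still caps it when N is smaller.  */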
4754 if (len != len3)
4755 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4756 rtx arg1_rtx = get_memory_rtx (arg1, len);
4757 rtx arg2_rtx = get_memory_rtx (arg2, len);
4758 rtx arg3_rtx = expand_normal (len);
4759 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4760 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4761 MIN (arg1_align, arg2_align));
4763 tree fndecl = get_callee_fndecl (exp);
4764 if (result)
4766 /* Check to see if the argument was declared attribute nonstring
4767 and if so, issue a warning since at this point it's not known
4768 to be nul-terminated. */
4769 maybe_warn_nonstring_arg (fndecl, exp);
4771 /* Return the value in the proper mode for this function. */
4772 mode = TYPE_MODE (TREE_TYPE (exp));
4773 if (GET_MODE (result) == mode)
4774 return result;
4775 if (target == 0)
4776 return convert_to_mode (mode, result, 0);
4777 convert_move (target, result, 0);
4778 return target;
4781 /* Expand the library call ourselves using a stabilized argument
4782 list to avoid re-evaluating the function's arguments twice. */
4783 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4784 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4785 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4786 return expand_call (fn, target, target == const0_rtx);
4789 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4790 if that's convenient. */
4792 rtx
4793 expand_builtin_saveregs (void)
4795 rtx val;
4796 rtx_insn *seq;
4798 /* Don't do __builtin_saveregs more than once in a function.
4799 Save the result of the first call and reuse it. */
4800 if (saveregs_value != 0)
4801 return saveregs_value;
4803 /* When this function is called, it means that registers must be
4804 saved on entry to this function. So we migrate the call to the
4805 first insn of this function. */
4807 start_sequence ();
4809 /* Do whatever the machine needs done in this case. */
4810 val = targetm.calls.expand_builtin_saveregs ();
4812 seq = get_insns ();
4813 end_sequence ();
4815 saveregs_value = val;
4817 /* Put the insns after the NOTE that starts the function. If this
4818 is inside a start_sequence, make the outer-level insn chain current, so
4819 the code is placed at the start of the function. */
4820 push_topmost_sequence ();
4821 emit_insn_after (seq, entry_of_function ());
4822 pop_topmost_sequence ();
4824 return val;
4827 /* Expand a call to __builtin_next_arg. */
4829 static rtx
4830 expand_builtin_next_arg (void)
4832 /* Checking arguments is already done in fold_builtin_next_arg
4833 which must be called before this function. */
4834 return expand_binop (ptr_mode, add_optab,
4835 crtl->args.internal_arg_pointer,
4836 crtl->args.arg_offset_rtx,
4837 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4840 /* Make it easier for the backends by protecting the valist argument
4841 from multiple evaluations. */
4843 static tree
4844 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4846 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4848 /* The current way of determining the type of valist is completely
4849 bogus. We should have the information on the va builtin instead. */
4850 if (!vatype)
4851 vatype = targetm.fn_abi_va_list (cfun->decl);
4853 if (TREE_CODE (vatype) == ARRAY_TYPE)
4855 if (TREE_SIDE_EFFECTS (valist))
4856 valist = save_expr (valist);
4858 /* For this case, the backends will be expecting a pointer to
4859 vatype, but it's possible we've actually been given an array
4860 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4861 So fix it. */
4862 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4864 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4865 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4868 else
4870 tree pt = build_pointer_type (vatype);
4872 if (! needs_lvalue)
4874 if (! TREE_SIDE_EFFECTS (valist))
4875 return valist;
4877 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4878 TREE_SIDE_EFFECTS (valist) = 1;
4881 if (TREE_SIDE_EFFECTS (valist))
4882 valist = save_expr (valist);
4883 valist = fold_build2_loc (loc, MEM_REF,
4884 vatype, valist, build_int_cst (pt, 0));
4887 return valist;
4890 /* The "standard" definition of va_list is void*. */
4892 tree
4893 std_build_builtin_va_list (void)
4895 return ptr_type_node;
4898 /* The "standard" abi va_list is va_list_type_node. */
4900 tree
4901 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4903 return va_list_type_node;
4906 /* The "standard" type of va_list is va_list_type_node. */
4908 tree
4909 std_canonical_va_list_type (tree type)
4911 tree wtype, htype;
4913 wtype = va_list_type_node;
4914 htype = type;
4916 if (TREE_CODE (wtype) == ARRAY_TYPE)
4918 /* If va_list is an array type, the argument may have decayed
4919 to a pointer type, e.g. by being passed to another function.
4920 In that case, unwrap both types so that we can compare the
4921 underlying records. */
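/* E.g., on targets where va_list is declared along the lines of
     typedef struct __va_list_tag __builtin_va_list[1];
   a va_list function parameter decays to struct __va_list_tag *,
   so both sides are unwrapped before the main-variant check.  */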
4922 if (TREE_CODE (htype) == ARRAY_TYPE
4923 || POINTER_TYPE_P (htype))
4925 wtype = TREE_TYPE (wtype);
4926 htype = TREE_TYPE (htype);
4929 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4930 return va_list_type_node;
4932 return NULL_TREE;
4935 /* The "standard" implementation of va_start: just assign `nextarg' to
4936 the variable. */
4938 void
4939 std_expand_builtin_va_start (tree valist, rtx nextarg)
4941 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4942 convert_move (va_r, nextarg, 0);
4945 /* Expand EXP, a call to __builtin_va_start. */
4947 static rtx
4948 expand_builtin_va_start (tree exp)
4950 rtx nextarg;
4951 tree valist;
4952 location_t loc = EXPR_LOCATION (exp);
4954 if (call_expr_nargs (exp) < 2)
4956 error_at (loc, "too few arguments to function %<va_start%>");
4957 return const0_rtx;
4960 if (fold_builtin_next_arg (exp, true))
4961 return const0_rtx;
4963 nextarg = expand_builtin_next_arg ();
4964 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4966 if (targetm.expand_builtin_va_start)
4967 targetm.expand_builtin_va_start (valist, nextarg);
4968 else
4969 std_expand_builtin_va_start (valist, nextarg);
4971 return const0_rtx;
4974 /* Expand EXP, a call to __builtin_va_end. */
4976 static rtx
4977 expand_builtin_va_end (tree exp)
4979 tree valist = CALL_EXPR_ARG (exp, 0);
4981 /* Evaluate for side effects, if needed. I hate macros that don't
4982 do that. */
4983 if (TREE_SIDE_EFFECTS (valist))
4984 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4986 return const0_rtx;
4989 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4990 builtin rather than just as an assignment in stdarg.h because of the
4991 nastiness of array-type va_list types. */
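/* Concretely: when va_list is an array type, a hypothetical
     va_copy (d, s);
   cannot be a simple pointer assignment; the expansion below
   instead block-copies sizeof (va_list) bytes from S to D.  */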
4993 static rtx
4994 expand_builtin_va_copy (tree exp)
4996 tree dst, src, t;
4997 location_t loc = EXPR_LOCATION (exp);
4999 dst = CALL_EXPR_ARG (exp, 0);
5000 src = CALL_EXPR_ARG (exp, 1);
5002 dst = stabilize_va_list_loc (loc, dst, 1);
5003 src = stabilize_va_list_loc (loc, src, 0);
5005 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5007 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5009 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5010 TREE_SIDE_EFFECTS (t) = 1;
5011 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5013 else
5015 rtx dstb, srcb, size;
5017 /* Evaluate to pointers. */
5018 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5019 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5020 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5021 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5023 dstb = convert_memory_address (Pmode, dstb);
5024 srcb = convert_memory_address (Pmode, srcb);
5026 /* "Dereference" to BLKmode memories. */
5027 dstb = gen_rtx_MEM (BLKmode, dstb);
5028 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5029 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5030 srcb = gen_rtx_MEM (BLKmode, srcb);
5031 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5032 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5034 /* Copy. */
5035 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5038 return const0_rtx;
5041 /* Expand a call to one of the builtin functions __builtin_frame_address or
5042 __builtin_return_address. */
5044 static rtx
5045 expand_builtin_frame_address (tree fndecl, tree exp)
5047 /* The argument must be a nonnegative integer constant.
5048 It counts the number of frames to scan up the stack.
5049 The value is either the frame pointer value or the return
5050 address saved in that frame. */
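/* E.g., __builtin_return_address (0) yields the return address of
   the current frame and __builtin_frame_address (0) its frame
   address; any nonzero count walks up the stack and is diagnosed
   below as potentially unsafe.  */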
5051 if (call_expr_nargs (exp) == 0)
5052 /* Warning about missing arg was already issued. */
5053 return const0_rtx;
5054 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5056 error ("invalid argument to %qD", fndecl);
5057 return const0_rtx;
5059 else
5061 /* Number of frames to scan up the stack. */
5062 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5064 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5066 /* Some ports cannot access arbitrary stack frames. */
5067 if (tem == NULL)
5069 warning (0, "unsupported argument to %qD", fndecl);
5070 return const0_rtx;
5073 if (count)
5075 /* Warn since no effort is made to ensure that any frame
5076 beyond the current one exists or can be safely reached. */
5077 warning (OPT_Wframe_address, "calling %qD with "
5078 "a nonzero argument is unsafe", fndecl);
5081 /* For __builtin_frame_address, return what we've got. */
5082 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5083 return tem;
5085 if (!REG_P (tem)
5086 && ! CONSTANT_P (tem))
5087 tem = copy_addr_to_reg (tem);
5088 return tem;
5092 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5093 failed and the caller should emit a normal call. */
5095 static rtx
5096 expand_builtin_alloca (tree exp)
5098 rtx op0;
5099 rtx result;
5100 unsigned int align;
5101 tree fndecl = get_callee_fndecl (exp);
5102 HOST_WIDE_INT max_size;
5103 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5104 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5105 bool valid_arglist
5106 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5107 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5108 VOID_TYPE)
5109 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5110 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5111 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5113 if (!valid_arglist)
5114 return NULL_RTX;
5116 if ((alloca_for_var
5117 && warn_vla_limit >= HOST_WIDE_INT_MAX
5118 && warn_alloc_size_limit < warn_vla_limit)
5119 || (!alloca_for_var
5120 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5121 && warn_alloc_size_limit < warn_alloca_limit
5124 /* -Walloca-larger-than and -Wvla-larger-than settings of
5125 less than HOST_WIDE_INT_MAX override the more general
5126 -Walloc-size-larger-than so unless either of the former
5127 options is smaller than the last one (which would imply
5128 that the call was already checked), check the alloca
5129 arguments for overflow. */
5130 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5131 int idx[] = { 0, -1 };
5132 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5135 /* Compute the argument. */
5136 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5138 /* Compute the alignment. */
5139 align = (fcode == BUILT_IN_ALLOCA
5140 ? BIGGEST_ALIGNMENT
5141 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5143 /* Compute the maximum size. */
5144 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5145 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5146 : -1);
5148 /* Allocate the desired space. If the allocation stems from the declaration
5149 of a variable-sized object, it cannot accumulate. */
5150 result
5151 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5152 result = convert_memory_address (ptr_mode, result);
5154 return result;
5157 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5158 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
5159 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
5160 in the comment for the handle_builtin_stack_restore function. */
5162 static rtx
5163 expand_asan_emit_allocas_unpoison (tree exp)
5165 tree arg0 = CALL_EXPR_ARG (exp, 0);
5166 tree arg1 = CALL_EXPR_ARG (exp, 1);
5167 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5168 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5169 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5170 stack_pointer_rtx, NULL_RTX, 0,
5171 OPTAB_LIB_WIDEN);
5172 off = convert_modes (ptr_mode, Pmode, off, 0);
5173 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5174 OPTAB_LIB_WIDEN);
5175 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5176 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5177 top, ptr_mode, bot, ptr_mode);
5178 return ret;
5181 /* Expand a call to bswap builtin in EXP.
5182 Return NULL_RTX if a normal call should be emitted rather than expanding the
5183 function in-line. If convenient, the result should be placed in TARGET.
5184 SUBTARGET may be used as the target for computing one of EXP's operands. */
5186 static rtx
5187 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5188 rtx subtarget)
5190 tree arg;
5191 rtx op0;
5193 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5194 return NULL_RTX;
5196 arg = CALL_EXPR_ARG (exp, 0);
5197 op0 = expand_expr (arg,
5198 subtarget && GET_MODE (subtarget) == target_mode
5199 ? subtarget : NULL_RTX,
5200 target_mode, EXPAND_NORMAL);
5201 if (GET_MODE (op0) != target_mode)
5202 op0 = convert_to_mode (target_mode, op0, 1);
5204 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5206 gcc_assert (target);
5208 return convert_to_mode (target_mode, target, 1);
5211 /* Expand a call to a unary builtin in EXP.
5212 Return NULL_RTX if a normal call should be emitted rather than expanding the
5213 function in-line. If convenient, the result should be placed in TARGET.
5214 SUBTARGET may be used as the target for computing one of EXP's operands. */
5216 static rtx
5217 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5218 rtx subtarget, optab op_optab)
5220 rtx op0;
5222 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5223 return NULL_RTX;
5225 /* Compute the argument. */
5226 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5227 (subtarget
5228 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5229 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5230 VOIDmode, EXPAND_NORMAL);
5231 /* Compute op, into TARGET if possible.
5232 Set TARGET to wherever the result comes back. */
5233 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5234 op_optab, op0, target, op_optab != clrsb_optab);
5235 gcc_assert (target);
5237 return convert_to_mode (target_mode, target, 0);
5240 /* Expand a call to __builtin_expect. We just return our argument
5241 as the builtin_expect semantics should already have been applied
5242 by the tree branch prediction pass. */
5244 static rtx
5245 expand_builtin_expect (tree exp, rtx target)
5247 tree arg;
5249 if (call_expr_nargs (exp) < 2)
5250 return const0_rtx;
5251 arg = CALL_EXPR_ARG (exp, 0);
5253 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5254 /* When guessing was done, the hints should already have been stripped away. */
5255 gcc_assert (!flag_guess_branch_prob
5256 || optimize == 0 || seen_error ());
5257 return target;
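/* Editorial illustration (not from the GCC sources): by the time this
   expander runs, a hint such as

     if (__builtin_expect (p == NULL, 0))   // "p is rarely NULL"
       handle_error ();

   has already influenced the edge probabilities computed by the
   tree-level predictor, so expanding to the bare first argument loses
   nothing.  */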
5260 /* Expand a call to __builtin_expect_with_probability. We just return our
5261 argument, as the builtin_expect semantics should already have been applied
5262 by the tree branch prediction pass. */
5264 static rtx
5265 expand_builtin_expect_with_probability (tree exp, rtx target)
5267 tree arg;
5269 if (call_expr_nargs (exp) < 3)
5270 return const0_rtx;
5271 arg = CALL_EXPR_ARG (exp, 0);
5273 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5274 /* When guessing was done, the hints should already have been stripped away. */
5275 gcc_assert (!flag_guess_branch_prob
5276 || optimize == 0 || seen_error ());
5277 return target;
5281 /* Expand a call to __builtin_assume_aligned. We just return our first
5282 argument, as the builtin_assume_aligned semantics should already have
5283 been applied by CCP. */
5285 static rtx
5286 expand_builtin_assume_aligned (tree exp, rtx target)
5288 if (call_expr_nargs (exp) < 2)
5289 return const0_rtx;
5290 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5291 EXPAND_NORMAL);
5292 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5293 && (call_expr_nargs (exp) < 3
5294 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5295 return target;
5298 void
5299 expand_builtin_trap (void)
5301 if (targetm.have_trap ())
5303 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5304 /* For trap insns when not accumulating outgoing args force
5305 REG_ARGS_SIZE note to prevent crossjumping of calls with
5306 different args sizes. */
5307 if (!ACCUMULATE_OUTGOING_ARGS)
5308 add_args_size_note (insn, stack_pointer_delta);
5310 else
5312 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5313 tree call_expr = build_call_expr (fn, 0);
5314 expand_call (call_expr, NULL_RTX, false);
5317 emit_barrier ();
5320 /* Expand a call to __builtin_unreachable. We do nothing except emit
5321 a barrier saying that control flow will not pass here.
5323 It is the responsibility of the program being compiled to ensure
5324 that control flow never reaches __builtin_unreachable. */
5325 static void
5326 expand_builtin_unreachable (void)
5328 emit_barrier ();
5331 /* Expand EXP, a call to fabs, fabsf or fabsl.
5332 Return NULL_RTX if a normal call should be emitted rather than expanding
5333 the function inline. If convenient, the result should be placed
5334 in TARGET. SUBTARGET may be used as the target for computing
5335 the operand. */
5337 static rtx
5338 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5340 machine_mode mode;
5341 tree arg;
5342 rtx op0;
5344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5345 return NULL_RTX;
5347 arg = CALL_EXPR_ARG (exp, 0);
5348 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5349 mode = TYPE_MODE (TREE_TYPE (arg));
5350 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5351 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5354 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5355 Return NULL if a normal call should be emitted rather than expanding the
5356 function inline. If convenient, the result should be placed in TARGET.
5357 SUBTARGET may be used as the target for computing the operand. */
5359 static rtx
5360 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5362 rtx op0, op1;
5363 tree arg;
5365 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5366 return NULL_RTX;
5368 arg = CALL_EXPR_ARG (exp, 0);
5369 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5371 arg = CALL_EXPR_ARG (exp, 1);
5372 op1 = expand_normal (arg);
5374 return expand_copysign (op0, op1, target);
5377 /* Expand a call to __builtin___clear_cache. */
5379 static rtx
5380 expand_builtin___clear_cache (tree exp)
5382 if (!targetm.code_for_clear_cache)
5384 #ifdef CLEAR_INSN_CACHE
5385 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5386 does something. Just do the default expansion to a call to
5387 __clear_cache(). */
5388 return NULL_RTX;
5389 #else
5390 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5391 does nothing. There is no need to call it. Do nothing. */
5392 return const0_rtx;
5393 #endif /* CLEAR_INSN_CACHE */
5396 /* We have a "clear_cache" insn, and it will handle everything. */
5397 tree begin, end;
5398 rtx begin_rtx, end_rtx;
5400 /* We must not expand to a library call. If we did, any
5401 fallback library function in libgcc that might contain a call to
5402 __builtin___clear_cache() would recurse infinitely. */
5403 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5405 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5406 return const0_rtx;
5409 if (targetm.have_clear_cache ())
5411 struct expand_operand ops[2];
5413 begin = CALL_EXPR_ARG (exp, 0);
5414 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5416 end = CALL_EXPR_ARG (exp, 1);
5417 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5419 create_address_operand (&ops[0], begin_rtx);
5420 create_address_operand (&ops[1], end_rtx);
5421 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5422 return const0_rtx;
5424 return const0_rtx;
5427 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5429 static rtx
5430 round_trampoline_addr (rtx tramp)
5432 rtx temp, addend, mask;
5434 /* If we don't need too much alignment, we'll have been guaranteed
5435 proper alignment by get_trampoline_type. */
5436 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5437 return tramp;
5439 /* Round address up to desired boundary. */
5440 temp = gen_reg_rtx (Pmode);
5441 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5442 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5444 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5445 temp, 0, OPTAB_LIB_WIDEN);
5446 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5447 temp, 0, OPTAB_LIB_WIDEN);
5449 return tramp;
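/* Worked example (editorial, not from the GCC sources): the two binops
   above compute the classic round-up

     rounded = (tramp + align - 1) & -align;   // align in bytes

   e.g. with a TRAMPOLINE_ALIGNMENT of 64 bits (align == 8), an address
   of 0x1003 becomes (0x1003 + 7) & -8 == 0x1008.  */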
5452 static rtx
5453 expand_builtin_init_trampoline (tree exp, bool onstack)
5455 tree t_tramp, t_func, t_chain;
5456 rtx m_tramp, r_tramp, r_chain, tmp;
5458 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5459 POINTER_TYPE, VOID_TYPE))
5460 return NULL_RTX;
5462 t_tramp = CALL_EXPR_ARG (exp, 0);
5463 t_func = CALL_EXPR_ARG (exp, 1);
5464 t_chain = CALL_EXPR_ARG (exp, 2);
5466 r_tramp = expand_normal (t_tramp);
5467 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5468 MEM_NOTRAP_P (m_tramp) = 1;
5470 /* If ONSTACK, the TRAMP argument should be the address of a field
5471 within the local function's FRAME decl. Either way, let's see if
5472 we can fill in the MEM_ATTRs for this memory. */
5473 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5474 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5476 /* The creator of a heap trampoline is responsible for making sure the
5477 address is aligned to at least STACK_BOUNDARY. Normally malloc
5478 will ensure this anyhow. */
5479 tmp = round_trampoline_addr (r_tramp);
5480 if (tmp != r_tramp)
5482 m_tramp = change_address (m_tramp, BLKmode, tmp);
5483 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5484 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5487 /* The FUNC argument should be the address of the nested function.
5488 Extract the actual function decl to pass to the hook. */
5489 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5490 t_func = TREE_OPERAND (t_func, 0);
5491 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5493 r_chain = expand_normal (t_chain);
5495 /* Generate insns to initialize the trampoline. */
5496 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5498 if (onstack)
5500 trampolines_created = 1;
5502 if (targetm.calls.custom_function_descriptors != 0)
5503 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5504 "trampoline generated for nested function %qD", t_func);
5507 return const0_rtx;
5510 static rtx
5511 expand_builtin_adjust_trampoline (tree exp)
5513 rtx tramp;
5515 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5516 return NULL_RTX;
5518 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5519 tramp = round_trampoline_addr (tramp);
5520 if (targetm.calls.trampoline_adjust_address)
5521 tramp = targetm.calls.trampoline_adjust_address (tramp);
5523 return tramp;
5526 /* Expand a call to the builtin descriptor initialization routine.
5527 A descriptor is made up of a pair of pointers: to the static
5528 chain and to the code entry, in that order. */
5530 static rtx
5531 expand_builtin_init_descriptor (tree exp)
5533 tree t_descr, t_func, t_chain;
5534 rtx m_descr, r_descr, r_func, r_chain;
5536 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5537 VOID_TYPE))
5538 return NULL_RTX;
5540 t_descr = CALL_EXPR_ARG (exp, 0);
5541 t_func = CALL_EXPR_ARG (exp, 1);
5542 t_chain = CALL_EXPR_ARG (exp, 2);
5544 r_descr = expand_normal (t_descr);
5545 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5546 MEM_NOTRAP_P (m_descr) = 1;
5548 r_func = expand_normal (t_func);
5549 r_chain = expand_normal (t_chain);
5551 /* Generate insns to initialize the descriptor. */
5552 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5553 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5554 POINTER_SIZE / BITS_PER_UNIT), r_func);
5556 return const0_rtx;
5559 /* Expand a call to the builtin descriptor adjustment routine. */
5561 static rtx
5562 expand_builtin_adjust_descriptor (tree exp)
5564 rtx tramp;
5566 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5567 return NULL_RTX;
5569 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5571 /* Unalign the descriptor to allow runtime identification. */
5572 tramp = plus_constant (ptr_mode, tramp,
5573 targetm.calls.custom_function_descriptors);
5575 return force_operand (tramp, NULL_RTX);
5578 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5579 function. The function first checks whether the back end provides
5580 an insn to implement signbit for the respective mode. If not, it
5581 checks whether the floating point format of the value is such that
5582 the sign bit can be extracted. If that is not the case, error out.
5583 EXP is the expression that is a call to the builtin function; if
5584 convenient, the result should be placed in TARGET. */
5585 static rtx
5586 expand_builtin_signbit (tree exp, rtx target)
5588 const struct real_format *fmt;
5589 scalar_float_mode fmode;
5590 scalar_int_mode rmode, imode;
5591 tree arg;
5592 int word, bitpos;
5593 enum insn_code icode;
5594 rtx temp;
5595 location_t loc = EXPR_LOCATION (exp);
5597 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5598 return NULL_RTX;
5600 arg = CALL_EXPR_ARG (exp, 0);
5601 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5602 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5603 fmt = REAL_MODE_FORMAT (fmode);
5605 arg = builtin_save_expr (arg);
5607 /* Expand the argument yielding an RTX expression. */
5608 temp = expand_normal (arg);
5610 /* Check if the back end provides an insn that handles signbit for the
5611 argument's mode. */
5612 icode = optab_handler (signbit_optab, fmode);
5613 if (icode != CODE_FOR_nothing)
5615 rtx_insn *last = get_last_insn ();
5616 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5617 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5618 return target;
5619 delete_insns_since (last);
5622 /* For floating point formats without a sign bit, implement signbit
5623 as "ARG < 0.0". */
5624 bitpos = fmt->signbit_ro;
5625 if (bitpos < 0)
5627 /* But we can't do this if the format supports signed zero. */
5628 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5630 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5631 build_real (TREE_TYPE (arg), dconst0));
5632 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5635 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5637 imode = int_mode_for_mode (fmode).require ();
5638 temp = gen_lowpart (imode, temp);
5640 else
5642 imode = word_mode;
5643 /* Handle targets with different FP word orders. */
5644 if (FLOAT_WORDS_BIG_ENDIAN)
5645 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5646 else
5647 word = bitpos / BITS_PER_WORD;
5648 temp = operand_subword_force (temp, word, fmode);
5649 bitpos = bitpos % BITS_PER_WORD;
5652 /* Force the intermediate word_mode (or narrower) result into a
5653 register. This avoids attempting to create paradoxical SUBREGs
5654 of floating point modes below. */
5655 temp = force_reg (imode, temp);
5657 /* If the bitpos is within the "result mode" lowpart, the operation
5658 can be implemented with a single bitwise AND. Otherwise, we need
5659 a right shift and an AND. */
5661 if (bitpos < GET_MODE_BITSIZE (rmode))
5663 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5665 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5666 temp = gen_lowpart (rmode, temp);
5667 temp = expand_binop (rmode, and_optab, temp,
5668 immed_wide_int_const (mask, rmode),
5669 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5671 else
5673 /* Perform a logical right shift to place the signbit in the least
5674 significant bit, then truncate the result to the desired mode
5675 and mask just this bit. */
5676 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5677 temp = gen_lowpart (rmode, temp);
5678 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5679 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5682 return temp;
5685 /* Expand fork or exec calls. TARGET is the desired target of the
5686 call. EXP is the call. FN is the
5687 identifier of the actual function. IGNORE is nonzero if the
5688 value is to be ignored. */
5690 static rtx
5691 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5693 tree id, decl;
5694 tree call;
5696 /* If we are not profiling, just call the function. */
5697 if (!profile_arc_flag)
5698 return NULL_RTX;
5700 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5701 compiler, so the code does not diverge, and the wrapper may run the
5702 code necessary for keeping the profiling sane. */
5704 switch (DECL_FUNCTION_CODE (fn))
5706 case BUILT_IN_FORK:
5707 id = get_identifier ("__gcov_fork");
5708 break;
5710 case BUILT_IN_EXECL:
5711 id = get_identifier ("__gcov_execl");
5712 break;
5714 case BUILT_IN_EXECV:
5715 id = get_identifier ("__gcov_execv");
5716 break;
5718 case BUILT_IN_EXECLP:
5719 id = get_identifier ("__gcov_execlp");
5720 break;
5722 case BUILT_IN_EXECLE:
5723 id = get_identifier ("__gcov_execle");
5724 break;
5726 case BUILT_IN_EXECVP:
5727 id = get_identifier ("__gcov_execvp");
5728 break;
5730 case BUILT_IN_EXECVE:
5731 id = get_identifier ("__gcov_execve");
5732 break;
5734 default:
5735 gcc_unreachable ();
5738 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5739 FUNCTION_DECL, id, TREE_TYPE (fn));
5740 DECL_EXTERNAL (decl) = 1;
5741 TREE_PUBLIC (decl) = 1;
5742 DECL_ARTIFICIAL (decl) = 1;
5743 TREE_NOTHROW (decl) = 1;
5744 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5745 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5746 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5747 return expand_call (call, target, ignore);
5752 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5753 the pointer in these functions is void*, the tree optimizers may remove
5754 casts. The mode computed in expand_builtin isn't reliable either, due
5755 to __sync_bool_compare_and_swap.
5757 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5758 group of builtins. This gives us log2 of the mode size. */
5760 static inline machine_mode
5761 get_builtin_sync_mode (int fcode_diff)
5763 /* The size is not negotiable, so ask not to get BLKmode in return
5764 if the target indicates that a smaller size would be better. */
5765 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
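/* Worked example (editorial): the _1/_2/_4/_8/_16 variants have
   consecutive built-in codes, so e.g. for BUILT_IN_SYNC_FETCH_AND_ADD_4
   the caller passes
     BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   and the requested mode is int_mode_for_size (8 << 2, 0), i.e. SImode
   on a typical target with 8-bit units.  */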
5768 /* Expand the memory expression LOC and return the appropriate memory operand
5769 for the builtin_sync operations. */
5771 static rtx
5772 get_builtin_sync_mem (tree loc, machine_mode mode)
5774 rtx addr, mem;
5776 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5777 addr = convert_memory_address (Pmode, addr);
5779 /* Note that we explicitly do not want any alias information for this
5780 memory, so that we kill all other live memories. Otherwise we don't
5781 satisfy the full barrier semantics of the intrinsic. */
5782 mem = validize_mem (gen_rtx_MEM (mode, addr));
5784 /* The alignment needs to be at least that of the mode. */
5785 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5786 get_pointer_alignment (loc)));
5787 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5788 MEM_VOLATILE_P (mem) = 1;
5790 return mem;
5793 /* Make sure an argument is in the right mode.
5794 EXP is the tree argument.
5795 MODE is the mode it should be in. */
5797 static rtx
5798 expand_expr_force_mode (tree exp, machine_mode mode)
5800 rtx val;
5801 machine_mode old_mode;
5803 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5804 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5805 of CONST_INTs, where we know the old_mode only from the call argument. */
5807 old_mode = GET_MODE (val);
5808 if (old_mode == VOIDmode)
5809 old_mode = TYPE_MODE (TREE_TYPE (exp));
5810 val = convert_modes (mode, old_mode, val, 1);
5811 return val;
5815 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5816 EXP is the CALL_EXPR. CODE is the rtx code
5817 that corresponds to the arithmetic or logical operation from the name;
5818 an exception here is that NOT actually means NAND. TARGET is an optional
5819 place for us to store the results; AFTER is true if this is the
5820 fetch_and_xxx form. */
5822 static rtx
5823 expand_builtin_sync_operation (machine_mode mode, tree exp,
5824 enum rtx_code code, bool after,
5825 rtx target)
5827 rtx val, mem;
5828 location_t loc = EXPR_LOCATION (exp);
5830 if (code == NOT && warn_sync_nand)
5832 tree fndecl = get_callee_fndecl (exp);
5833 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5835 static bool warned_f_a_n, warned_n_a_f;
5837 switch (fcode)
5839 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5840 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5841 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5842 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5843 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5844 if (warned_f_a_n)
5845 break;
5847 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5848 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5849 warned_f_a_n = true;
5850 break;
5852 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5853 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5854 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5855 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5856 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5857 if (warned_n_a_f)
5858 break;
5860 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5861 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5862 warned_n_a_f = true;
5863 break;
5865 default:
5866 gcc_unreachable ();
5870 /* Expand the operands. */
5871 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5872 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5874 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5875 after);
5878 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5879 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5880 true if this is the boolean form. TARGET is a place for us to store the
5881 results; this is NOT optional if IS_BOOL is true. */
5883 static rtx
5884 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5885 bool is_bool, rtx target)
5887 rtx old_val, new_val, mem;
5888 rtx *pbool, *poval;
5890 /* Expand the operands. */
5891 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5892 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5893 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5895 pbool = poval = NULL;
5896 if (target != const0_rtx)
5898 if (is_bool)
5899 pbool = &target;
5900 else
5901 poval = &target;
5903 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5904 false, MEMMODEL_SYNC_SEQ_CST,
5905 MEMMODEL_SYNC_SEQ_CST))
5906 return NULL_RTX;
5908 return target;
5911 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5912 general form is actually an atomic exchange, and some targets only
5913 support a reduced form with the second argument being a constant 1.
5914 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5915 the results. */
5917 static rtx
5918 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5919 rtx target)
5921 rtx val, mem;
5923 /* Expand the operands. */
5924 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5925 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5927 return expand_sync_lock_test_and_set (target, mem, val);
5930 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5932 static void
5933 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5935 rtx mem;
5937 /* Expand the operands. */
5938 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5940 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5943 /* Given an integer representing an ``enum memmodel'', verify its
5944 correctness and return the memory model enum. */
5946 static enum memmodel
5947 get_memmodel (tree exp)
5949 rtx op;
5950 unsigned HOST_WIDE_INT val;
5951 source_location loc
5952 = expansion_point_location_if_in_system_header (input_location);
5954 /* If the parameter is not a constant, it's a run time value so we'll just
5955 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5956 if (TREE_CODE (exp) != INTEGER_CST)
5957 return MEMMODEL_SEQ_CST;
5959 op = expand_normal (exp);
5961 val = INTVAL (op);
5962 if (targetm.memmodel_check)
5963 val = targetm.memmodel_check (val);
5964 else if (val & ~MEMMODEL_MASK)
5966 warning_at (loc, OPT_Winvalid_memory_model,
5967 "unknown architecture specifier in memory model to builtin");
5968 return MEMMODEL_SEQ_CST;
5971 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5972 if (memmodel_base (val) >= MEMMODEL_LAST)
5974 warning_at (loc, OPT_Winvalid_memory_model,
5975 "invalid memory model argument to builtin");
5976 return MEMMODEL_SEQ_CST;
5979 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5980 be conservative and promote consume to acquire. */
5981 if (val == MEMMODEL_CONSUME)
5982 val = MEMMODEL_ACQUIRE;
5984 return (enum memmodel) val;
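/* Editorial illustration (not from the GCC sources): the memory model
   argument arrives here as a plain integer constant, e.g.

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   reaches this function as MEMMODEL_CONSUME and, per the PR 59448
   workaround above, is expanded as if __ATOMIC_ACQUIRE had been
   requested.  */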
5987 /* Expand the __atomic_exchange intrinsic:
5988 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5989 EXP is the CALL_EXPR.
5990 TARGET is an optional place for us to store the results. */
5992 static rtx
5993 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5995 rtx val, mem;
5996 enum memmodel model;
5998 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6000 if (!flag_inline_atomics)
6001 return NULL_RTX;
6003 /* Expand the operands. */
6004 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6005 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6007 return expand_atomic_exchange (target, mem, val, model);
6010 /* Expand the __atomic_compare_exchange intrinsic:
6011 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6012 TYPE desired, BOOL weak,
6013 enum memmodel success,
6014 enum memmodel failure)
6015 EXP is the CALL_EXPR.
6016 TARGET is an optional place for us to store the results. */
6018 static rtx
6019 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6020 rtx target)
6022 rtx expect, desired, mem, oldval;
6023 rtx_code_label *label;
6024 enum memmodel success, failure;
6025 tree weak;
6026 bool is_weak;
6027 source_location loc
6028 = expansion_point_location_if_in_system_header (input_location);
6030 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6031 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6033 if (failure > success)
6035 warning_at (loc, OPT_Winvalid_memory_model,
6036 "failure memory model cannot be stronger than success "
6037 "memory model for %<__atomic_compare_exchange%>");
6038 success = MEMMODEL_SEQ_CST;
6041 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6043 warning_at (loc, OPT_Winvalid_memory_model,
6044 "invalid failure memory model for "
6045 "%<__atomic_compare_exchange%>");
6046 failure = MEMMODEL_SEQ_CST;
6047 success = MEMMODEL_SEQ_CST;
6051 if (!flag_inline_atomics)
6052 return NULL_RTX;
6054 /* Expand the operands. */
6055 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6057 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6058 expect = convert_memory_address (Pmode, expect);
6059 expect = gen_rtx_MEM (mode, expect);
6060 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6062 weak = CALL_EXPR_ARG (exp, 3);
6063 is_weak = false;
6064 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6065 is_weak = true;
6067 if (target == const0_rtx)
6068 target = NULL;
6070 /* Lest the rtl backend create a race condition with an improper store
6071 to memory, always create a new pseudo for OLDVAL. */
6072 oldval = NULL;
6074 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6075 is_weak, success, failure))
6076 return NULL_RTX;
6078 /* Conditionally store back to EXPECT, lest we create a race condition
6079 with an improper store to memory. */
6080 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6081 the normal case where EXPECT is totally private, i.e. a register. At
6082 which point the store can be unconditional. */
6083 label = gen_label_rtx ();
6084 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6085 GET_MODE (target), 1, label);
6086 emit_move_insn (expect, oldval);
6087 emit_label (label);
6089 return target;
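/* Editorial illustration (not from the GCC sources): the conditional
   store-back above implements the usual compare-exchange contract,

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;   // on failure, expected is refreshed from memory

   i.e. EXPECT is only overwritten when the exchange fails.  */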
6092 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6093 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6094 call. The weak parameter must be dropped to match the expected parameter
6095 list and the expected argument changed from value to pointer to memory
6096 slot. */
6098 static void
6099 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6101 unsigned int z;
6102 vec<tree, va_gc> *vec;
6104 vec_alloc (vec, 5);
6105 vec->quick_push (gimple_call_arg (call, 0));
6106 tree expected = gimple_call_arg (call, 1);
6107 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6108 TREE_TYPE (expected));
6109 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6110 if (expd != x)
6111 emit_move_insn (x, expd);
6112 tree v = make_tree (TREE_TYPE (expected), x);
6113 vec->quick_push (build1 (ADDR_EXPR,
6114 build_pointer_type (TREE_TYPE (expected)), v));
6115 vec->quick_push (gimple_call_arg (call, 2));
6116 /* Skip the boolean weak parameter. */
6117 for (z = 4; z < 6; z++)
6118 vec->quick_push (gimple_call_arg (call, z));
6119 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6120 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6121 gcc_assert (bytes_log2 < 5);
6122 built_in_function fncode
6123 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6124 + bytes_log2);
6125 tree fndecl = builtin_decl_explicit (fncode);
6126 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6127 fndecl);
6128 tree exp = build_call_vec (boolean_type_node, fn, vec);
6129 tree lhs = gimple_call_lhs (call);
6130 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6131 if (lhs)
6133 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6134 if (GET_MODE (boolret) != mode)
6135 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6136 x = force_reg (mode, x);
6137 write_complex_part (target, boolret, true);
6138 write_complex_part (target, x, false);
6142 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6144 void
6145 expand_ifn_atomic_compare_exchange (gcall *call)
6147 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6148 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6149 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6150 rtx expect, desired, mem, oldval, boolret;
6151 enum memmodel success, failure;
6152 tree lhs;
6153 bool is_weak;
6154 source_location loc
6155 = expansion_point_location_if_in_system_header (gimple_location (call));
6157 success = get_memmodel (gimple_call_arg (call, 4));
6158 failure = get_memmodel (gimple_call_arg (call, 5));
6160 if (failure > success)
6162 warning_at (loc, OPT_Winvalid_memory_model,
6163 "failure memory model cannot be stronger than success "
6164 "memory model for %<__atomic_compare_exchange%>");
6165 success = MEMMODEL_SEQ_CST;
6168 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6170 warning_at (loc, OPT_Winvalid_memory_model,
6171 "invalid failure memory model for "
6172 "%<__atomic_compare_exchange%>");
6173 failure = MEMMODEL_SEQ_CST;
6174 success = MEMMODEL_SEQ_CST;
6177 if (!flag_inline_atomics)
6179 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6180 return;
6183 /* Expand the operands. */
6184 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6186 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6187 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6189 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6191 boolret = NULL;
6192 oldval = NULL;
6194 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6195 is_weak, success, failure))
6197 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6198 return;
6201 lhs = gimple_call_lhs (call);
6202 if (lhs)
6204 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6205 if (GET_MODE (boolret) != mode)
6206 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6207 write_complex_part (target, boolret, true);
6208 write_complex_part (target, oldval, false);
6212 /* Expand the __atomic_load intrinsic:
6213 TYPE __atomic_load (TYPE *object, enum memmodel)
6214 EXP is the CALL_EXPR.
6215 TARGET is an optional place for us to store the results. */
6217 static rtx
6218 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6220 rtx mem;
6221 enum memmodel model;
6223 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6224 if (is_mm_release (model) || is_mm_acq_rel (model))
6226 source_location loc
6227 = expansion_point_location_if_in_system_header (input_location);
6228 warning_at (loc, OPT_Winvalid_memory_model,
6229 "invalid memory model for %<__atomic_load%>");
6230 model = MEMMODEL_SEQ_CST;
6233 if (!flag_inline_atomics)
6234 return NULL_RTX;
6236 /* Expand the operand. */
6237 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6239 return expand_atomic_load (target, mem, model);
6243 /* Expand the __atomic_store intrinsic:
6244 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6245 EXP is the CALL_EXPR. */
6248 static rtx
6249 expand_builtin_atomic_store (machine_mode mode, tree exp)
6251 rtx mem, val;
6252 enum memmodel model;
6254 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6255 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6256 || is_mm_release (model)))
6258 source_location loc
6259 = expansion_point_location_if_in_system_header (input_location);
6260 warning_at (loc, OPT_Winvalid_memory_model,
6261 "invalid memory model for %<__atomic_store%>");
6262 model = MEMMODEL_SEQ_CST;
6265 if (!flag_inline_atomics)
6266 return NULL_RTX;
6268 /* Expand the operands. */
6269 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6270 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6272 return expand_atomic_store (mem, val, model, false);
6275 /* Expand the __atomic_fetch_XXX intrinsic:
6276 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6277 EXP is the CALL_EXPR.
6278 TARGET is an optional place for us to store the results.
6279 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6280 FETCH_AFTER is true if returning the result of the operation.
6281 FETCH_AFTER is false if returning the value before the operation.
6282 IGNORE is true if the result is not used.
6283 EXT_CALL is the correct builtin for an external call if this cannot be
6284 resolved to an instruction sequence. */
6286 static rtx
6287 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6288 enum rtx_code code, bool fetch_after,
6289 bool ignore, enum built_in_function ext_call)
6291 rtx val, mem, ret;
6292 enum memmodel model;
6293 tree fndecl;
6294 tree addr;
6296 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6298 /* Expand the operands. */
6299 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6300 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6302 /* Only try generating instructions if inlining is turned on. */
6303 if (flag_inline_atomics)
6305 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6306 if (ret)
6307 return ret;
6310 /* If there is no dedicated library routine, give up and let a normal call be emitted. */
6311 if (ext_call == BUILT_IN_NONE)
6312 return NULL_RTX;
6314 /* Change the call to the specified function. */
6315 fndecl = get_callee_fndecl (exp);
6316 addr = CALL_EXPR_FN (exp);
6317 STRIP_NOPS (addr);
6319 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6320 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6322 /* If we will emit code after the call, the call cannot be a tail call.
6323 If it is emitted as a tail call, a barrier is emitted after it, and
6324 then all trailing code is removed. */
6325 if (!ignore)
6326 CALL_EXPR_TAILCALL (exp) = 0;
6328 /* Expand the call here so we can emit trailing code. */
6329 ret = expand_call (exp, target, ignore);
6331 /* Replace the original function just in case it matters. */
6332 TREE_OPERAND (addr, 0) = fndecl;
6334 /* Then issue the arithmetic correction to return the right result. */
6335 if (!ignore)
6337 if (code == NOT)
6339 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6340 OPTAB_LIB_WIDEN);
6341 ret = expand_simple_unop (mode, NOT, ret, target, true);
6343 else
6344 ret = expand_simple_binop (mode, code, ret, val, target, true,
6345 OPTAB_LIB_WIDEN);
6347 return ret;
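/* Worked example (editorial): if, say, __atomic_nand_fetch cannot be
   expanded inline, the library routine returns the pre-operation value,
   and the correction above recovers the post-operation result:

     ret = __atomic_fetch_nand_4 (mem, val, model);   // old value
     ret = ~(ret & val);                              // nand_fetch value

   For the other operations the fixup is simply ret = ret OP val.  */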
6350 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6352 void
6353 expand_ifn_atomic_bit_test_and (gcall *call)
6355 tree ptr = gimple_call_arg (call, 0);
6356 tree bit = gimple_call_arg (call, 1);
6357 tree flag = gimple_call_arg (call, 2);
6358 tree lhs = gimple_call_lhs (call);
6359 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6360 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6361 enum rtx_code code;
6362 optab optab;
6363 struct expand_operand ops[5];
6365 gcc_assert (flag_inline_atomics);
6367 if (gimple_call_num_args (call) == 4)
6368 model = get_memmodel (gimple_call_arg (call, 3));
6370 rtx mem = get_builtin_sync_mem (ptr, mode);
6371 rtx val = expand_expr_force_mode (bit, mode);
6373 switch (gimple_call_internal_fn (call))
6375 case IFN_ATOMIC_BIT_TEST_AND_SET:
6376 code = IOR;
6377 optab = atomic_bit_test_and_set_optab;
6378 break;
6379 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6380 code = XOR;
6381 optab = atomic_bit_test_and_complement_optab;
6382 break;
6383 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6384 code = AND;
6385 optab = atomic_bit_test_and_reset_optab;
6386 break;
6387 default:
6388 gcc_unreachable ();
6391 if (lhs == NULL_TREE)
6393 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6394 val, NULL_RTX, true, OPTAB_DIRECT);
6395 if (code == AND)
6396 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6397 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6398 return;
6401 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6402 enum insn_code icode = direct_optab_handler (optab, mode);
6403 gcc_assert (icode != CODE_FOR_nothing);
6404 create_output_operand (&ops[0], target, mode);
6405 create_fixed_operand (&ops[1], mem);
6406 create_convert_operand_to (&ops[2], val, mode, true);
6407 create_integer_operand (&ops[3], model);
6408 create_integer_operand (&ops[4], integer_onep (flag));
6409 if (maybe_expand_insn (icode, 5, ops))
6410 return;
6412 rtx bitval = val;
6413 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6414 val, NULL_RTX, true, OPTAB_DIRECT);
6415 rtx maskval = val;
6416 if (code == AND)
6417 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6418 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6419 code, model, false);
6420 if (integer_onep (flag))
6422 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6423 NULL_RTX, true, OPTAB_DIRECT);
6424 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6425 true, OPTAB_DIRECT);
6427 else
6428 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6429 OPTAB_DIRECT);
6430 if (result != target)
6431 emit_move_insn (target, result);
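/* Editorial illustration (not from the GCC sources): the gimple passes
   form IFN_ATOMIC_BIT_TEST_AND_SET from source idioms such as

     bool was_set
       = (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1;

   and the fallback path above reproduces that shift-and-mask sequence
   (or just the mask when the caller wants the masked value).  */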
6434 /* Expand an atomic clear operation.
6435 void __atomic_clear (BOOL *obj, enum memmodel)
6436 EXP is the call expression. */
6438 static rtx
6439 expand_builtin_atomic_clear (tree exp)
6441 machine_mode mode;
6442 rtx mem, ret;
6443 enum memmodel model;
6445 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6446 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6447 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6449 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6451 source_location loc
6452 = expansion_point_location_if_in_system_header (input_location);
6453 warning_at (loc, OPT_Winvalid_memory_model,
6454 "invalid memory model for %<__atomic_store%>");
6455 model = MEMMODEL_SEQ_CST;
6458 /* Try issuing an __atomic_store through expand_atomic_store, allowing a
6459 fallback to a __sync_lock_release pattern. The only way this can
6460 fail is if the bool type is larger than a word size. Unlikely, but
6461 handle it anyway for completeness. Assume a single threaded model since
6462 there is no atomic support in this case, and no barriers are required. */
6463 ret = expand_atomic_store (mem, const0_rtx, model, true);
6464 if (!ret)
6465 emit_move_insn (mem, const0_rtx);
6466 return const0_rtx;
6469 /* Expand an atomic test_and_set operation.
6470 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6471 EXP is the call expression. */
6473 static rtx
6474 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6476 rtx mem;
6477 enum memmodel model;
6478 machine_mode mode;
6480 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6481 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6482 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6484 return expand_atomic_test_and_set (target, mem, model);
6488 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6489 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6491 static tree
6492 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6494 int size;
6495 machine_mode mode;
6496 unsigned int mode_align, type_align;
6498 if (TREE_CODE (arg0) != INTEGER_CST)
6499 return NULL_TREE;
6501 /* We need a corresponding integer mode for the access to be lock-free. */
6502 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6503 if (!int_mode_for_size (size, 0).exists (&mode))
6504 return boolean_false_node;
6506 mode_align = GET_MODE_ALIGNMENT (mode);
6508 if (TREE_CODE (arg1) == INTEGER_CST)
6510 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6512 /* Either this argument is null, or it's a fake pointer encoding
6513 the alignment of the object. */
6514 val = least_bit_hwi (val);
6515 val *= BITS_PER_UNIT;
6517 if (val == 0 || mode_align < val)
6518 type_align = mode_align;
6519 else
6520 type_align = val;
6522 else
6524 tree ttype = TREE_TYPE (arg1);
6526 /* This function is usually invoked and folded immediately by the front
6527 end before anything else has a chance to look at it. The pointer
6528 parameter at this point is usually cast to a void *, so check for that
6529 and look past the cast. */
6530 if (CONVERT_EXPR_P (arg1)
6531 && POINTER_TYPE_P (ttype)
6532 && VOID_TYPE_P (TREE_TYPE (ttype))
6533 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6534 arg1 = TREE_OPERAND (arg1, 0);
6536 ttype = TREE_TYPE (arg1);
6537 gcc_assert (POINTER_TYPE_P (ttype));
6539 /* Get the underlying type of the object. */
6540 ttype = TREE_TYPE (ttype);
6541 type_align = TYPE_ALIGN (ttype);
6544 /* If the object has smaller alignment, the lock free routines cannot
6545 be used. */
6546 if (type_align < mode_align)
6547 return boolean_false_node;
6549 /* Check if a compare_and_swap pattern exists for the mode which represents
6550 the required size. The pattern is not allowed to fail, so the existence
6551 of the pattern indicates support is present. Also require that an
6552 atomic load exists for the required size. */
6553 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6554 return boolean_true_node;
6555 else
6556 return boolean_false_node;
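/* Editorial illustration (not from the GCC sources): a front-end query
   such as

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int must be lock-free");

   folds here: the null (zero) second argument takes the INTEGER_CST path
   above, so the answer depends only on the mode's natural alignment.  */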
6559 /* Return true if the parameters to call EXP represent an object which will
6560 always generate lock free instructions. The first argument represents the
6561 size of the object, and the second parameter is a pointer to the object
6562 itself. If NULL is passed for the object, then the result is based on
6563 typical alignment for an object of the specified size. Otherwise return
6564 false. */
6566 static rtx
6567 expand_builtin_atomic_always_lock_free (tree exp)
6569 tree size;
6570 tree arg0 = CALL_EXPR_ARG (exp, 0);
6571 tree arg1 = CALL_EXPR_ARG (exp, 1);
6573 if (TREE_CODE (arg0) != INTEGER_CST)
6575 error ("non-constant argument 1 to __atomic_always_lock_free");
6576 return const0_rtx;
6579 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6580 if (size == boolean_true_node)
6581 return const1_rtx;
6582 return const0_rtx;
6585 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6586 is lock free on this architecture. */
6588 static tree
6589 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6591 if (!flag_inline_atomics)
6592 return NULL_TREE;
6594 /* If it isn't always lock free, don't generate a result. */
6595 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6596 return boolean_true_node;
6598 return NULL_TREE;
6601 /* Return true if the parameters to call EXP represent an object which will
6602 always generate lock free instructions. The first argument represents the
6603 size of the object, and the second parameter is a pointer to the object
6604 itself. If NULL is passed for the object, then the result is based on
6605 typical alignment for an object of the specified size. Otherwise return
6606 NULL. */
6608 static rtx
6609 expand_builtin_atomic_is_lock_free (tree exp)
6611 tree size;
6612 tree arg0 = CALL_EXPR_ARG (exp, 0);
6613 tree arg1 = CALL_EXPR_ARG (exp, 1);
6615 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6617 error ("non-integer argument 1 to __atomic_is_lock_free");
6618 return NULL_RTX;
6621 if (!flag_inline_atomics)
6622 return NULL_RTX;
6624 /* If the value is known at compile time, return the RTX for it. */
6625 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6626 if (size == boolean_true_node)
6627 return const1_rtx;
6629 return NULL_RTX;
6632 /* Expand the __atomic_thread_fence intrinsic:
6633 void __atomic_thread_fence (enum memmodel)
6634 EXP is the CALL_EXPR. */
6636 static void
6637 expand_builtin_atomic_thread_fence (tree exp)
6639 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6640 expand_mem_thread_fence (model);
6643 /* Expand the __atomic_signal_fence intrinsic:
6644 void __atomic_signal_fence (enum memmodel)
6645 EXP is the CALL_EXPR. */
6647 static void
6648 expand_builtin_atomic_signal_fence (tree exp)
6650 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6651 expand_mem_signal_fence (model);
6654 /* Expand the __sync_synchronize intrinsic. */
6656 static void
6657 expand_builtin_sync_synchronize (void)
6659 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6662 static rtx
6663 expand_builtin_thread_pointer (tree exp, rtx target)
6665 enum insn_code icode;
6666 if (!validate_arglist (exp, VOID_TYPE))
6667 return const0_rtx;
6668 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6669 if (icode != CODE_FOR_nothing)
6671 struct expand_operand op;
6672 /* If the target is not suitable then create a new target. */
6673 if (target == NULL_RTX
6674 || !REG_P (target)
6675 || GET_MODE (target) != Pmode)
6676 target = gen_reg_rtx (Pmode);
6677 create_output_operand (&op, target, Pmode);
6678 expand_insn (icode, 1, &op);
6679 return target;
6681 error ("__builtin_thread_pointer is not supported on this target");
6682 return const0_rtx;
6685 static void
6686 expand_builtin_set_thread_pointer (tree exp)
6688 enum insn_code icode;
6689 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6690 return;
6691 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6692 if (icode != CODE_FOR_nothing)
6694 struct expand_operand op;
6695 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6696 Pmode, EXPAND_NORMAL);
6697 create_input_operand (&op, val, Pmode);
6698 expand_insn (icode, 1, &op);
6699 return;
6701 error ("__builtin_set_thread_pointer is not supported on this target");
6705 /* Emit code to restore the current value of the stack. */
6707 static void
6708 expand_stack_restore (tree var)
6710 rtx_insn *prev;
6711 rtx sa = expand_normal (var);
6713 sa = convert_memory_address (Pmode, sa);
6715 prev = get_last_insn ();
6716 emit_stack_restore (SAVE_BLOCK, sa);
6718 record_new_stack_level ();
6720 fixup_args_size_notes (prev, get_last_insn (), 0);
6723 /* Emit code to save the current value of the stack. */
6725 static rtx
6726 expand_stack_save (void)
6728 rtx ret = NULL_RTX;
6730 emit_stack_save (SAVE_BLOCK, &ret);
6731 return ret;
6734 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6736 static rtx
6737 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6739 const char *name;
6740 rtx fallback_retval;
6741 rtx_insn *(*gen_fn) (rtx, rtx);
6742 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6744 case BUILT_IN_GOACC_PARLEVEL_ID:
6745 name = "__builtin_goacc_parlevel_id";
6746 fallback_retval = const0_rtx;
6747 gen_fn = targetm.gen_oacc_dim_pos;
6748 break;
6749 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6750 name = "__builtin_goacc_parlevel_size";
6751 fallback_retval = const1_rtx;
6752 gen_fn = targetm.gen_oacc_dim_size;
6753 break;
6754 default:
6755 gcc_unreachable ();
6758 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6760 error ("%qs only supported in OpenACC code", name);
6761 return const0_rtx;
6764 tree arg = CALL_EXPR_ARG (exp, 0);
6765 if (TREE_CODE (arg) != INTEGER_CST)
6767 error ("non-constant argument 0 to %qs", name);
6768 return const0_rtx;
6771 int dim = TREE_INT_CST_LOW (arg);
6772 switch (dim)
6774 case GOMP_DIM_GANG:
6775 case GOMP_DIM_WORKER:
6776 case GOMP_DIM_VECTOR:
6777 break;
6778 default:
6779 error ("illegal argument 0 to %qs", name);
6780 return const0_rtx;
6783 if (ignore)
6784 return target;
6786 if (target == NULL_RTX)
6787 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6789 if (!targetm.have_oacc_dim_size ())
6791 emit_move_insn (target, fallback_retval);
6792 return target;
6795 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6796 emit_insn (gen_fn (reg, GEN_INT (dim)));
6797 if (reg != target)
6798 emit_move_insn (target, reg);
6800 return target;
6803 /* Expand a string compare operation using a sequence of char comparisons
6804 to get rid of the calling overhead, with result going to TARGET if
6805 that's convenient.
6807 VAR_STR is the variable string source;
6808 CONST_STR is the constant string source;
6809 LENGTH is the number of chars to compare;
6810 CONST_STR_N indicates which source string is the constant string;
6811 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6813 The expansion looks like this (assuming const_str_n is 2, i.e., arg2 is the constant string):
6815 target = (int) (unsigned char) var_str[0]
6816 - (int) (unsigned char) const_str[0];
6817 if (target != 0)
6818 goto ne_label;
6820 target = (int) (unsigned char) var_str[length - 2]
6821 - (int) (unsigned char) const_str[length - 2];
6822 if (target != 0)
6823 goto ne_label;
6824 target = (int) (unsigned char) var_str[length - 1]
6825 - (int) (unsigned char) const_str[length - 1];
6826 ne_label:
6829 static rtx
6830 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6831 unsigned HOST_WIDE_INT length,
6832 int const_str_n, machine_mode mode)
6834 HOST_WIDE_INT offset = 0;
6835 rtx var_rtx_array
6836 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6837 rtx var_rtx = NULL_RTX;
6838 rtx const_rtx = NULL_RTX;
6839 rtx result = target ? target : gen_reg_rtx (mode);
6840 rtx_code_label *ne_label = gen_label_rtx ();
6841 tree unit_type_node = unsigned_char_type_node;
6842 scalar_int_mode unit_mode
6843 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6845 start_sequence ();
6847 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6849 var_rtx
6850 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6851 const_rtx = c_readstr (const_str + offset, unit_mode);
6852 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6853 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6855 op0 = convert_modes (mode, unit_mode, op0, 1);
6856 op1 = convert_modes (mode, unit_mode, op1, 1);
6857 result = expand_simple_binop (mode, MINUS, op0, op1,
6858 result, 1, OPTAB_WIDEN);
6859 if (i < length - 1)
6860 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6861 mode, true, ne_label);
6862 offset += GET_MODE_SIZE (unit_mode);
6865 emit_label (ne_label);
6866 rtx_insn *insns = get_insns ();
6867 end_sequence ();
6868 emit_insn (insns);
6870 return result;
6873 /* Inline expansion of a call to str(n)cmp, with result going to
6874 TARGET if that's convenient.
6875 If the call is not inlined, return NULL_RTX. */
6876 static rtx
6877 inline_expand_builtin_string_cmp (tree exp, rtx target)
6879 tree fndecl = get_callee_fndecl (exp);
6880 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6881 unsigned HOST_WIDE_INT length = 0;
6882 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6884 /* Do NOT apply this inlining expansion when optimizing for size or
6885 at optimization levels below 2. */
6886 if (optimize < 2 || optimize_insn_for_size_p ())
6887 return NULL_RTX;
6889 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6890 || fcode == BUILT_IN_STRNCMP
6891 || fcode == BUILT_IN_MEMCMP);
6893 /* On a target where the type of the call (int) has the same or narrower
6894 precision than unsigned char, give up the inlining expansion. */
6895 if (TYPE_PRECISION (unsigned_char_type_node)
6896 >= TYPE_PRECISION (TREE_TYPE (exp)))
6897 return NULL_RTX;
6899 tree arg1 = CALL_EXPR_ARG (exp, 0);
6900 tree arg2 = CALL_EXPR_ARG (exp, 1);
6901 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6903 unsigned HOST_WIDE_INT len1 = 0;
6904 unsigned HOST_WIDE_INT len2 = 0;
6905 unsigned HOST_WIDE_INT len3 = 0;
6907 const char *src_str1 = c_getstr (arg1, &len1);
6908 const char *src_str2 = c_getstr (arg2, &len2);
6910 /* If neither string is a constant string, the call does not qualify. */
6911 if (!src_str1 && !src_str2)
6912 return NULL_RTX;
6914 /* For strncmp, if the length is not a constant, the call does not qualify. */
6915 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6916 return NULL_RTX;
6918 int const_str_n = 0;
6919 if (!len1)
6920 const_str_n = 2;
6921 else if (!len2)
6922 const_str_n = 1;
6923 else if (len2 > len1)
6924 const_str_n = 1;
6925 else
6926 const_str_n = 2;
6928 gcc_checking_assert (const_str_n > 0);
6929 length = (const_str_n == 1) ? len1 : len2;
6931 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6932 length = len3;
6934 /* If the length of the comparison is larger than the threshold,
6935 do nothing. */
6936 if (length > (unsigned HOST_WIDE_INT)
6937 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6938 return NULL_RTX;
6940 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6942 /* Now start the inline expansion of the call. */
6943 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6944 (const_str_n == 1) ? src_str1 : src_str2, length,
6945 const_str_n, mode);
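/* Editorial illustration (not from the GCC sources): a call such as

     int r = strcmp (buf, "abc");   // second argument is a constant string

   qualifies at -O2 and above and is handed to inline_string_cmp, provided
   the comparison length stays within the builtin-string-cmp-inline-length
   parameter checked above.  */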
6948 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6949 represents the size of the first argument to that call, or VOIDmode
6950 if the argument is a pointer. IGNORE will be true if the result
6951 isn't used. */
6952 static rtx
6953 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6954 bool ignore)
6956 rtx val, failsafe;
6957 unsigned nargs = call_expr_nargs (exp);
6959 tree arg0 = CALL_EXPR_ARG (exp, 0);
6961 if (mode == VOIDmode)
6963 mode = TYPE_MODE (TREE_TYPE (arg0));
6964 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6967 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6969 /* An optional second argument can be used as a failsafe value on
6970 some machines. If it isn't present, then the failsafe value is
6971 assumed to be 0. */
6972 if (nargs > 1)
6974 tree arg1 = CALL_EXPR_ARG (exp, 1);
6975 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6977 else
6978 failsafe = const0_rtx;
6980 /* If the result isn't used, the behavior is undefined. It would be
6981 nice to emit a warning here, but path splitting means this might
6982 happen with legitimate code. So simply drop the builtin
6983 expansion in that case; we've handled any side-effects above. */
6984 if (ignore)
6985 return const0_rtx;
6987 /* If we don't have a suitable target, create one to hold the result. */
6988 if (target == NULL || GET_MODE (target) != mode)
6989 target = gen_reg_rtx (mode);
6991 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6992 val = convert_modes (mode, VOIDmode, val, false);
6994 return targetm.speculation_safe_value (mode, target, val, failsafe);
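/* Editorial illustration (not from the GCC sources): the typical
   Spectre-v1 style guard is

     if (i < bound)
       x = array[__builtin_speculation_safe_value (i)];

   With no second argument the failsafe value used under misspeculation
   is 0, matching the const0_rtx default above.  */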
6997 /* Expand an expression EXP that calls a built-in function,
6998 with result going to TARGET if that's convenient
6999 (and in mode MODE if that's convenient).
7000 SUBTARGET may be used as the target for computing one of EXP's operands.
7001 IGNORE is nonzero if the value is to be ignored. */
7003 rtx
7004 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7005 int ignore)
7007 tree fndecl = get_callee_fndecl (exp);
7008 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7009 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7010 int flags;
7012 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7013 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7015 /* When ASan is enabled, we don't want to expand some memory/string
7016 builtins and rely on libsanitizer's hooks. This allows us to avoid
7017 redundant checks and be sure that possible overflows will be detected
7018 by ASan. */
7020 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7021 return expand_call (exp, target, ignore);
7023 /* When not optimizing, generate calls to library functions for a certain
7024 set of builtins. */
7025 if (!optimize
7026 && !called_as_built_in (fndecl)
7027 && fcode != BUILT_IN_FORK
7028 && fcode != BUILT_IN_EXECL
7029 && fcode != BUILT_IN_EXECV
7030 && fcode != BUILT_IN_EXECLP
7031 && fcode != BUILT_IN_EXECLE
7032 && fcode != BUILT_IN_EXECVP
7033 && fcode != BUILT_IN_EXECVE
7034 && !ALLOCA_FUNCTION_CODE_P (fcode)
7035 && fcode != BUILT_IN_FREE)
7036 return expand_call (exp, target, ignore);
7038 /* The built-in function expanders test for target == const0_rtx
7039 to determine whether the function's result will be ignored. */
7040 if (ignore)
7041 target = const0_rtx;
7043 /* If the result of a pure or const built-in function is ignored, and
7044 none of its arguments are volatile, we can avoid expanding the
7045 built-in call and just evaluate the arguments for side-effects. */
7046 if (target == const0_rtx
7047 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7048 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7050 bool volatilep = false;
7051 tree arg;
7052 call_expr_arg_iterator iter;
7054 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7055 if (TREE_THIS_VOLATILE (arg))
7057 volatilep = true;
7058 break;
7061 if (! volatilep)
7063 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7064 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7065 return const0_rtx;
7069 switch (fcode)
7071 CASE_FLT_FN (BUILT_IN_FABS):
7072 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7073 case BUILT_IN_FABSD32:
7074 case BUILT_IN_FABSD64:
7075 case BUILT_IN_FABSD128:
7076 target = expand_builtin_fabs (exp, target, subtarget);
7077 if (target)
7078 return target;
7079 break;
7081 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7082 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7083 target = expand_builtin_copysign (exp, target, subtarget);
7084 if (target)
7085 return target;
7086 break;
7088 /* Just do a normal library call if we were unable to fold
7089 the values. */
7090 CASE_FLT_FN (BUILT_IN_CABS):
7091 break;
7093 CASE_FLT_FN (BUILT_IN_FMA):
7094 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7095 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7096 if (target)
7097 return target;
7098 break;
7100 CASE_FLT_FN (BUILT_IN_ILOGB):
7101 if (! flag_unsafe_math_optimizations)
7102 break;
7103 gcc_fallthrough ();
7104 CASE_FLT_FN (BUILT_IN_ISINF):
7105 CASE_FLT_FN (BUILT_IN_FINITE):
7106 case BUILT_IN_ISFINITE:
7107 case BUILT_IN_ISNORMAL:
7108 target = expand_builtin_interclass_mathfn (exp, target);
7109 if (target)
7110 return target;
7111 break;
7113 CASE_FLT_FN (BUILT_IN_ICEIL):
7114 CASE_FLT_FN (BUILT_IN_LCEIL):
7115 CASE_FLT_FN (BUILT_IN_LLCEIL):
7116 CASE_FLT_FN (BUILT_IN_LFLOOR):
7117 CASE_FLT_FN (BUILT_IN_IFLOOR):
7118 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7119 target = expand_builtin_int_roundingfn (exp, target);
7120 if (target)
7121 return target;
7122 break;
7124 CASE_FLT_FN (BUILT_IN_IRINT):
7125 CASE_FLT_FN (BUILT_IN_LRINT):
7126 CASE_FLT_FN (BUILT_IN_LLRINT):
7127 CASE_FLT_FN (BUILT_IN_IROUND):
7128 CASE_FLT_FN (BUILT_IN_LROUND):
7129 CASE_FLT_FN (BUILT_IN_LLROUND):
7130 target = expand_builtin_int_roundingfn_2 (exp, target);
7131 if (target)
7132 return target;
7133 break;
7135 CASE_FLT_FN (BUILT_IN_POWI):
7136 target = expand_builtin_powi (exp, target);
7137 if (target)
7138 return target;
7139 break;
7141 CASE_FLT_FN (BUILT_IN_CEXPI):
7142 target = expand_builtin_cexpi (exp, target);
7143 gcc_assert (target);
7144 return target;
7146 CASE_FLT_FN (BUILT_IN_SIN):
7147 CASE_FLT_FN (BUILT_IN_COS):
7148 if (! flag_unsafe_math_optimizations)
7149 break;
7150 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7151 if (target)
7152 return target;
7153 break;
7155 CASE_FLT_FN (BUILT_IN_SINCOS):
7156 if (! flag_unsafe_math_optimizations)
7157 break;
7158 target = expand_builtin_sincos (exp);
7159 if (target)
7160 return target;
7161 break;
7163 case BUILT_IN_APPLY_ARGS:
7164 return expand_builtin_apply_args ();
7166 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7167 FUNCTION with a copy of the parameters described by
7168 ARGUMENTS, and ARGSIZE. It returns a block of memory
7169 allocated on the stack into which is stored all the registers
7170 that might possibly be used for returning the result of a
7171 function. ARGUMENTS is the value returned by
7172 __builtin_apply_args. ARGSIZE is the number of bytes of
7173 arguments that must be copied. ??? How should this value be
7174 computed? We'll also need a safe worst case value for varargs
7175 functions. */
7176 case BUILT_IN_APPLY:
7177 if (!validate_arglist (exp, POINTER_TYPE,
7178 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7179 && !validate_arglist (exp, REFERENCE_TYPE,
7180 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7181 return const0_rtx;
7182 else
7184 rtx ops[3];
7186 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7187 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7188 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7190 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7193 /* __builtin_return (RESULT) causes the function to return the
7194 value described by RESULT. RESULT is address of the block of
7195 memory returned by __builtin_apply. */
7196 case BUILT_IN_RETURN:
7197 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7198 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7199 return const0_rtx;
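/* A hypothetical sketch combining the two builtins above to forward a
   call with whatever arguments the current function received.  The
   argument size of 64 bytes is an arbitrary guess; as noted above, how
   to compute it properly is an open question:

     void target_function (void);

     void
     forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *result
         = __builtin_apply ((void (*) ()) target_function, args, 64);
       __builtin_return (result);
     }
*/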
7201 case BUILT_IN_SAVEREGS:
7202 return expand_builtin_saveregs ();
7204 case BUILT_IN_VA_ARG_PACK:
7205 /* All valid uses of __builtin_va_arg_pack () are removed during
7206 inlining. */
7207 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7208 return const0_rtx;
7210 case BUILT_IN_VA_ARG_PACK_LEN:
7211 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7212 inlining. */
7213 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7214 return const0_rtx;
7216 /* Return the address of the first anonymous stack arg. */
7217 case BUILT_IN_NEXT_ARG:
7218 if (fold_builtin_next_arg (exp, false))
7219 return const0_rtx;
7220 return expand_builtin_next_arg ();
7222 case BUILT_IN_CLEAR_CACHE:
7223 target = expand_builtin___clear_cache (exp);
7224 if (target)
7225 return target;
7226 break;
7228 case BUILT_IN_CLASSIFY_TYPE:
7229 return expand_builtin_classify_type (exp);
7231 case BUILT_IN_CONSTANT_P:
7232 return const0_rtx;
7234 case BUILT_IN_FRAME_ADDRESS:
7235 case BUILT_IN_RETURN_ADDRESS:
7236 return expand_builtin_frame_address (fndecl, exp);
7238 /* Returns the address of the area where the structure is returned.
7239 0 otherwise. */
7240 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7241 if (call_expr_nargs (exp) != 0
7242 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7243 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7244 return const0_rtx;
7245 else
7246 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7248 CASE_BUILT_IN_ALLOCA:
7249 target = expand_builtin_alloca (exp);
7250 if (target)
7251 return target;
7252 break;
7254 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7255 return expand_asan_emit_allocas_unpoison (exp);
7257 case BUILT_IN_STACK_SAVE:
7258 return expand_stack_save ();
7260 case BUILT_IN_STACK_RESTORE:
7261 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7262 return const0_rtx;
7264 case BUILT_IN_BSWAP16:
7265 case BUILT_IN_BSWAP32:
7266 case BUILT_IN_BSWAP64:
7267 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7268 if (target)
7269 return target;
7270 break;
7272 CASE_INT_FN (BUILT_IN_FFS):
7273 target = expand_builtin_unop (target_mode, exp, target,
7274 subtarget, ffs_optab);
7275 if (target)
7276 return target;
7277 break;
7279 CASE_INT_FN (BUILT_IN_CLZ):
7280 target = expand_builtin_unop (target_mode, exp, target,
7281 subtarget, clz_optab);
7282 if (target)
7283 return target;
7284 break;
7286 CASE_INT_FN (BUILT_IN_CTZ):
7287 target = expand_builtin_unop (target_mode, exp, target,
7288 subtarget, ctz_optab);
7289 if (target)
7290 return target;
7291 break;
7293 CASE_INT_FN (BUILT_IN_CLRSB):
7294 target = expand_builtin_unop (target_mode, exp, target,
7295 subtarget, clrsb_optab);
7296 if (target)
7297 return target;
7298 break;
7300 CASE_INT_FN (BUILT_IN_POPCOUNT):
7301 target = expand_builtin_unop (target_mode, exp, target,
7302 subtarget, popcount_optab);
7303 if (target)
7304 return target;
7305 break;
7307 CASE_INT_FN (BUILT_IN_PARITY):
7308 target = expand_builtin_unop (target_mode, exp, target,
7309 subtarget, parity_optab);
7310 if (target)
7311 return target;
7312 break;
7314 case BUILT_IN_STRLEN:
7315 target = expand_builtin_strlen (exp, target, target_mode);
7316 if (target)
7317 return target;
7318 break;
7320 case BUILT_IN_STRNLEN:
7321 target = expand_builtin_strnlen (exp, target, target_mode);
7322 if (target)
7323 return target;
7324 break;
7326 case BUILT_IN_STRCAT:
7327 target = expand_builtin_strcat (exp, target);
7328 if (target)
7329 return target;
7330 break;
7332 case BUILT_IN_STRCPY:
7333 target = expand_builtin_strcpy (exp, target);
7334 if (target)
7335 return target;
7336 break;
7338 case BUILT_IN_STRNCAT:
7339 target = expand_builtin_strncat (exp, target);
7340 if (target)
7341 return target;
7342 break;
7344 case BUILT_IN_STRNCPY:
7345 target = expand_builtin_strncpy (exp, target);
7346 if (target)
7347 return target;
7348 break;
7350 case BUILT_IN_STPCPY:
7351 target = expand_builtin_stpcpy (exp, target, mode);
7352 if (target)
7353 return target;
7354 break;
7356 case BUILT_IN_STPNCPY:
7357 target = expand_builtin_stpncpy (exp, target);
7358 if (target)
7359 return target;
7360 break;
7362 case BUILT_IN_MEMCHR:
7363 target = expand_builtin_memchr (exp, target);
7364 if (target)
7365 return target;
7366 break;
7368 case BUILT_IN_MEMCPY:
7369 target = expand_builtin_memcpy (exp, target);
7370 if (target)
7371 return target;
7372 break;
7374 case BUILT_IN_MEMMOVE:
7375 target = expand_builtin_memmove (exp, target);
7376 if (target)
7377 return target;
7378 break;
7380 case BUILT_IN_MEMPCPY:
7381 target = expand_builtin_mempcpy (exp, target);
7382 if (target)
7383 return target;
7384 break;
7386 case BUILT_IN_MEMSET:
7387 target = expand_builtin_memset (exp, target, mode);
7388 if (target)
7389 return target;
7390 break;
7392 case BUILT_IN_BZERO:
7393 target = expand_builtin_bzero (exp);
7394 if (target)
7395 return target;
7396 break;
7398 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7399 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7400 when changing it to a strcmp call. */
7401 case BUILT_IN_STRCMP_EQ:
7402 target = expand_builtin_memcmp (exp, target, true);
7403 if (target)
7404 return target;
7406 /* Change this call back to a BUILT_IN_STRCMP. */
7407 TREE_OPERAND (exp, 1)
7408 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7410 /* Delete the last parameter. */
7411 unsigned int i;
7412 vec<tree, va_gc> *arg_vec;
7413 vec_alloc (arg_vec, 2);
7414 for (i = 0; i < 2; i++)
7415 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7416 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7417 /* FALLTHROUGH */
7419 case BUILT_IN_STRCMP:
7420 target = expand_builtin_strcmp (exp, target);
7421 if (target)
7422 return target;
7423 break;
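/* To make the rewrite above concrete with a hypothetical example: an
   internally generated call __builtin_strcmp_eq (s1, s2, 4) that
   cannot be expanded as a memcmp is rebuilt as strcmp (s1, s2); the
   callee is repointed at BUILT_IN_STRCMP, the length argument is
   dropped, and control falls through to the BUILT_IN_STRCMP case.  */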
7425 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7426 back to a BUILT_IN_STRNCMP. */
7427 case BUILT_IN_STRNCMP_EQ:
7428 target = expand_builtin_memcmp (exp, target, true);
7429 if (target)
7430 return target;
7432 /* Change it back to a BUILT_IN_STRNCMP. */
7433 TREE_OPERAND (exp, 1)
7434 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7435 /* FALLTHROUGH */
7437 case BUILT_IN_STRNCMP:
7438 target = expand_builtin_strncmp (exp, target, mode);
7439 if (target)
7440 return target;
7441 break;
7443 case BUILT_IN_BCMP:
7444 case BUILT_IN_MEMCMP:
7445 case BUILT_IN_MEMCMP_EQ:
7446 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7447 if (target)
7448 return target;
7449 if (fcode == BUILT_IN_MEMCMP_EQ)
7451 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7452 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7454 break;
7456 case BUILT_IN_SETJMP:
7457 /* This should have been lowered to the builtins below. */
7458 gcc_unreachable ();
7460 case BUILT_IN_SETJMP_SETUP:
7461 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7462 and the receiver label. */
7463 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7465 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7466 VOIDmode, EXPAND_NORMAL);
7467 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7468 rtx_insn *label_r = label_rtx (label);
7470 /* This is copied from the handling of non-local gotos. */
7471 expand_builtin_setjmp_setup (buf_addr, label_r);
7472 nonlocal_goto_handler_labels
7473 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7474 nonlocal_goto_handler_labels);
7475 /* ??? Do not let expand_label treat us as such since we would
7476 not want to be both on the list of non-local labels and on
7477 the list of forced labels. */
7478 FORCED_LABEL (label) = 0;
7479 return const0_rtx;
7481 break;
7483 case BUILT_IN_SETJMP_RECEIVER:
7484 /* __builtin_setjmp_receiver is passed the receiver label. */
7485 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7487 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7488 rtx_insn *label_r = label_rtx (label);
7490 expand_builtin_setjmp_receiver (label_r);
7491 return const0_rtx;
7493 break;
7495 /* __builtin_longjmp is passed a pointer to an array of five words.
7496 It's similar to the C library longjmp function but works with
7497 __builtin_setjmp above. */
7498 case BUILT_IN_LONGJMP:
7499 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7501 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7502 VOIDmode, EXPAND_NORMAL);
7503 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7505 if (value != const1_rtx)
7507 error ("%<__builtin_longjmp%> second argument must be 1");
7508 return const0_rtx;
7511 expand_builtin_longjmp (buf_addr, value);
7512 return const0_rtx;
7514 break;
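/* A minimal usage sketch, hypothetical and not from the GCC sources,
   showing the pairing these builtins expect.  The buffer is an array
   of five words, and the second argument to __builtin_longjmp must be
   the literal 1:

     void *jmpbuf[5];

     void
     raise_error (void)
     {
       __builtin_longjmp (jmpbuf, 1);
     }

     int
     example (void)
     {
       if (__builtin_setjmp (jmpbuf))
         return 1;
       raise_error ();
       return 0;
     }
*/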
7516 case BUILT_IN_NONLOCAL_GOTO:
7517 target = expand_builtin_nonlocal_goto (exp);
7518 if (target)
7519 return target;
7520 break;
7522 /* This updates the setjmp buffer that is its argument with the value
7523 of the current stack pointer. */
7524 case BUILT_IN_UPDATE_SETJMP_BUF:
7525 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7527 rtx buf_addr
7528 = expand_normal (CALL_EXPR_ARG (exp, 0));
7530 expand_builtin_update_setjmp_buf (buf_addr);
7531 return const0_rtx;
7533 break;
7535 case BUILT_IN_TRAP:
7536 expand_builtin_trap ();
7537 return const0_rtx;
7539 case BUILT_IN_UNREACHABLE:
7540 expand_builtin_unreachable ();
7541 return const0_rtx;
7543 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7544 case BUILT_IN_SIGNBITD32:
7545 case BUILT_IN_SIGNBITD64:
7546 case BUILT_IN_SIGNBITD128:
7547 target = expand_builtin_signbit (exp, target);
7548 if (target)
7549 return target;
7550 break;
7552 /* Various hooks for the DWARF 2 __throw routine. */
7553 case BUILT_IN_UNWIND_INIT:
7554 expand_builtin_unwind_init ();
7555 return const0_rtx;
7556 case BUILT_IN_DWARF_CFA:
7557 return virtual_cfa_rtx;
7558 #ifdef DWARF2_UNWIND_INFO
7559 case BUILT_IN_DWARF_SP_COLUMN:
7560 return expand_builtin_dwarf_sp_column ();
7561 case BUILT_IN_INIT_DWARF_REG_SIZES:
7562 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7563 return const0_rtx;
7564 #endif
7565 case BUILT_IN_FROB_RETURN_ADDR:
7566 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7567 case BUILT_IN_EXTRACT_RETURN_ADDR:
7568 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7569 case BUILT_IN_EH_RETURN:
7570 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7571 CALL_EXPR_ARG (exp, 1));
7572 return const0_rtx;
7573 case BUILT_IN_EH_RETURN_DATA_REGNO:
7574 return expand_builtin_eh_return_data_regno (exp);
7575 case BUILT_IN_EXTEND_POINTER:
7576 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7577 case BUILT_IN_EH_POINTER:
7578 return expand_builtin_eh_pointer (exp);
7579 case BUILT_IN_EH_FILTER:
7580 return expand_builtin_eh_filter (exp);
7581 case BUILT_IN_EH_COPY_VALUES:
7582 return expand_builtin_eh_copy_values (exp);
7584 case BUILT_IN_VA_START:
7585 return expand_builtin_va_start (exp);
7586 case BUILT_IN_VA_END:
7587 return expand_builtin_va_end (exp);
7588 case BUILT_IN_VA_COPY:
7589 return expand_builtin_va_copy (exp);
7590 case BUILT_IN_EXPECT:
7591 return expand_builtin_expect (exp, target);
7592 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7593 return expand_builtin_expect_with_probability (exp, target);
7594 case BUILT_IN_ASSUME_ALIGNED:
7595 return expand_builtin_assume_aligned (exp, target);
7596 case BUILT_IN_PREFETCH:
7597 expand_builtin_prefetch (exp);
7598 return const0_rtx;
7600 case BUILT_IN_INIT_TRAMPOLINE:
7601 return expand_builtin_init_trampoline (exp, true);
7602 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7603 return expand_builtin_init_trampoline (exp, false);
7604 case BUILT_IN_ADJUST_TRAMPOLINE:
7605 return expand_builtin_adjust_trampoline (exp);
7607 case BUILT_IN_INIT_DESCRIPTOR:
7608 return expand_builtin_init_descriptor (exp);
7609 case BUILT_IN_ADJUST_DESCRIPTOR:
7610 return expand_builtin_adjust_descriptor (exp);
7612 case BUILT_IN_FORK:
7613 case BUILT_IN_EXECL:
7614 case BUILT_IN_EXECV:
7615 case BUILT_IN_EXECLP:
7616 case BUILT_IN_EXECLE:
7617 case BUILT_IN_EXECVP:
7618 case BUILT_IN_EXECVE:
7619 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7620 if (target)
7621 return target;
7622 break;
7624 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7625 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7626 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7627 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7628 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7629 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7630 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7631 if (target)
7632 return target;
7633 break;
7635 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7636 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7637 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7638 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7639 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7640 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7641 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7642 if (target)
7643 return target;
7644 break;
7646 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7647 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7648 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7649 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7650 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7651 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7652 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7653 if (target)
7654 return target;
7655 break;
7657 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7658 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7659 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7660 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7661 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7662 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7663 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7664 if (target)
7665 return target;
7666 break;
7668 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7669 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7670 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7671 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7672 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7673 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7674 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7675 if (target)
7676 return target;
7677 break;
7679 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7680 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7681 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7682 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7683 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7684 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7685 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7686 if (target)
7687 return target;
7688 break;
7690 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7691 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7692 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7693 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7694 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7695 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7696 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7697 if (target)
7698 return target;
7699 break;
7701 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7702 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7703 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7704 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7705 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7706 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7707 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7708 if (target)
7709 return target;
7710 break;
7712 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7713 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7714 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7715 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7716 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7717 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7718 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7724 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7725 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7726 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7727 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7728 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7729 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7730 if (target)
7731 return target;
7732 break;
7734 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7735 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7736 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7737 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7738 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7739 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7740 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7741 if (target)
7742 return target;
7743 break;
7745 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7746 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7747 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7748 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7749 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7750 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7751 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7752 if (target)
7753 return target;
7754 break;
7756 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7757 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7758 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7759 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7760 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7761 if (mode == VOIDmode)
7762 mode = TYPE_MODE (boolean_type_node);
7763 if (!target || !register_operand (target, mode))
7764 target = gen_reg_rtx (mode);
7766 mode = get_builtin_sync_mode
7767 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7768 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7769 if (target)
7770 return target;
7771 break;
7773 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7774 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7775 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7776 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7777 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7778 mode = get_builtin_sync_mode
7779 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7780 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7781 if (target)
7782 return target;
7783 break;
7785 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7786 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7787 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7788 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7789 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7790 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7791 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7792 if (target)
7793 return target;
7794 break;
7796 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7797 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7798 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7799 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7800 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7801 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7802 expand_builtin_sync_lock_release (mode, exp);
7803 return const0_rtx;
7805 case BUILT_IN_SYNC_SYNCHRONIZE:
7806 expand_builtin_sync_synchronize ();
7807 return const0_rtx;
7809 case BUILT_IN_ATOMIC_EXCHANGE_1:
7810 case BUILT_IN_ATOMIC_EXCHANGE_2:
7811 case BUILT_IN_ATOMIC_EXCHANGE_4:
7812 case BUILT_IN_ATOMIC_EXCHANGE_8:
7813 case BUILT_IN_ATOMIC_EXCHANGE_16:
7814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7815 target = expand_builtin_atomic_exchange (mode, exp, target);
7816 if (target)
7817 return target;
7818 break;
7820 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7821 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7822 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7823 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7824 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7826 unsigned int nargs, z;
7827 vec<tree, va_gc> *vec;
7829 mode =
7830 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7831 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7832 if (target)
7833 return target;
7835 /* If this is turned into an external library call, the weak parameter
7836 must be dropped to match the expected parameter list. */
7837 nargs = call_expr_nargs (exp);
7838 vec_alloc (vec, nargs - 1);
7839 for (z = 0; z < 3; z++)
7840 vec->quick_push (CALL_EXPR_ARG (exp, z));
7841 /* Skip the boolean weak parameter. */
7842 for (z = 4; z < 6; z++)
7843 vec->quick_push (CALL_EXPR_ARG (exp, z));
7844 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7845 break;
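/* Concretely, as a hypothetical illustration: a source-level call

     __atomic_compare_exchange_n (p, &expected, desired, 0,
                                  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)

   reaches this point with six arguments; when no inline expansion is
   possible, the boolean weak flag (the fourth argument) is dropped so
   that the rebuilt call matches the five-argument libatomic entry
   point __atomic_compare_exchange_N.  */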
7848 case BUILT_IN_ATOMIC_LOAD_1:
7849 case BUILT_IN_ATOMIC_LOAD_2:
7850 case BUILT_IN_ATOMIC_LOAD_4:
7851 case BUILT_IN_ATOMIC_LOAD_8:
7852 case BUILT_IN_ATOMIC_LOAD_16:
7853 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7854 target = expand_builtin_atomic_load (mode, exp, target);
7855 if (target)
7856 return target;
7857 break;
7859 case BUILT_IN_ATOMIC_STORE_1:
7860 case BUILT_IN_ATOMIC_STORE_2:
7861 case BUILT_IN_ATOMIC_STORE_4:
7862 case BUILT_IN_ATOMIC_STORE_8:
7863 case BUILT_IN_ATOMIC_STORE_16:
7864 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7865 target = expand_builtin_atomic_store (mode, exp);
7866 if (target)
7867 return const0_rtx;
7868 break;
7870 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7871 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7872 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7873 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7874 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7876 enum built_in_function lib;
7877 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7878 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7879 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7880 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7881 ignore, lib);
7882 if (target)
7883 return target;
7884 break;
7886 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7887 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7888 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7889 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7890 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7892 enum built_in_function lib;
7893 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7894 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7895 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7896 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7897 ignore, lib);
7898 if (target)
7899 return target;
7900 break;
7902 case BUILT_IN_ATOMIC_AND_FETCH_1:
7903 case BUILT_IN_ATOMIC_AND_FETCH_2:
7904 case BUILT_IN_ATOMIC_AND_FETCH_4:
7905 case BUILT_IN_ATOMIC_AND_FETCH_8:
7906 case BUILT_IN_ATOMIC_AND_FETCH_16:
7908 enum built_in_function lib;
7909 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7910 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7911 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7912 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7913 ignore, lib);
7914 if (target)
7915 return target;
7916 break;
7918 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7919 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7920 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7921 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7922 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7924 enum built_in_function lib;
7925 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7926 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7927 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7928 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7929 ignore, lib);
7930 if (target)
7931 return target;
7932 break;
7934 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7935 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7936 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7937 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7938 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7940 enum built_in_function lib;
7941 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7942 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7943 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7944 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7945 ignore, lib);
7946 if (target)
7947 return target;
7948 break;
7950 case BUILT_IN_ATOMIC_OR_FETCH_1:
7951 case BUILT_IN_ATOMIC_OR_FETCH_2:
7952 case BUILT_IN_ATOMIC_OR_FETCH_4:
7953 case BUILT_IN_ATOMIC_OR_FETCH_8:
7954 case BUILT_IN_ATOMIC_OR_FETCH_16:
7956 enum built_in_function lib;
7957 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7958 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7959 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7960 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7961 ignore, lib);
7962 if (target)
7963 return target;
7964 break;
7966 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7967 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7968 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7969 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7970 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7971 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7972 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7973 ignore, BUILT_IN_NONE);
7974 if (target)
7975 return target;
7976 break;
7978 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7979 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7980 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7981 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7982 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7983 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7984 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7985 ignore, BUILT_IN_NONE);
7986 if (target)
7987 return target;
7988 break;
7990 case BUILT_IN_ATOMIC_FETCH_AND_1:
7991 case BUILT_IN_ATOMIC_FETCH_AND_2:
7992 case BUILT_IN_ATOMIC_FETCH_AND_4:
7993 case BUILT_IN_ATOMIC_FETCH_AND_8:
7994 case BUILT_IN_ATOMIC_FETCH_AND_16:
7995 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7996 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7997 ignore, BUILT_IN_NONE);
7998 if (target)
7999 return target;
8000 break;
8002 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8003 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8004 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8005 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8006 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8007 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8008 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8009 ignore, BUILT_IN_NONE);
8010 if (target)
8011 return target;
8012 break;
8014 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8015 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8016 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8017 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8018 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8019 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8020 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8021 ignore, BUILT_IN_NONE);
8022 if (target)
8023 return target;
8024 break;
8026 case BUILT_IN_ATOMIC_FETCH_OR_1:
8027 case BUILT_IN_ATOMIC_FETCH_OR_2:
8028 case BUILT_IN_ATOMIC_FETCH_OR_4:
8029 case BUILT_IN_ATOMIC_FETCH_OR_8:
8030 case BUILT_IN_ATOMIC_FETCH_OR_16:
8031 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8032 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8033 ignore, BUILT_IN_NONE);
8034 if (target)
8035 return target;
8036 break;
8038 case BUILT_IN_ATOMIC_TEST_AND_SET:
8039 return expand_builtin_atomic_test_and_set (exp, target);
8041 case BUILT_IN_ATOMIC_CLEAR:
8042 return expand_builtin_atomic_clear (exp);
8044 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8045 return expand_builtin_atomic_always_lock_free (exp);
8047 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8048 target = expand_builtin_atomic_is_lock_free (exp);
8049 if (target)
8050 return target;
8051 break;
8053 case BUILT_IN_ATOMIC_THREAD_FENCE:
8054 expand_builtin_atomic_thread_fence (exp);
8055 return const0_rtx;
8057 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8058 expand_builtin_atomic_signal_fence (exp);
8059 return const0_rtx;
8061 case BUILT_IN_OBJECT_SIZE:
8062 return expand_builtin_object_size (exp);
8064 case BUILT_IN_MEMCPY_CHK:
8065 case BUILT_IN_MEMPCPY_CHK:
8066 case BUILT_IN_MEMMOVE_CHK:
8067 case BUILT_IN_MEMSET_CHK:
8068 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8069 if (target)
8070 return target;
8071 break;
8073 case BUILT_IN_STRCPY_CHK:
8074 case BUILT_IN_STPCPY_CHK:
8075 case BUILT_IN_STRNCPY_CHK:
8076 case BUILT_IN_STPNCPY_CHK:
8077 case BUILT_IN_STRCAT_CHK:
8078 case BUILT_IN_STRNCAT_CHK:
8079 case BUILT_IN_SNPRINTF_CHK:
8080 case BUILT_IN_VSNPRINTF_CHK:
8081 maybe_emit_chk_warning (exp, fcode);
8082 break;
8084 case BUILT_IN_SPRINTF_CHK:
8085 case BUILT_IN_VSPRINTF_CHK:
8086 maybe_emit_sprintf_chk_warning (exp, fcode);
8087 break;
8089 case BUILT_IN_FREE:
8090 if (warn_free_nonheap_object)
8091 maybe_emit_free_warning (exp);
8092 break;
8094 case BUILT_IN_THREAD_POINTER:
8095 return expand_builtin_thread_pointer (exp, target);
8097 case BUILT_IN_SET_THREAD_POINTER:
8098 expand_builtin_set_thread_pointer (exp);
8099 return const0_rtx;
8101 case BUILT_IN_ACC_ON_DEVICE:
8102 /* Do a library call if we failed to expand the builtin when
8103 folding. */
8104 break;
8106 case BUILT_IN_GOACC_PARLEVEL_ID:
8107 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8108 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8110 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8111 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8113 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8114 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8115 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8116 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8117 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8118 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8119 return expand_speculation_safe_value (mode, exp, target, ignore);
8121 default: /* Just do a library call if the builtin is unknown. */
8122 break;
8125 /* The switch statement above can drop through to cause the function
8126 to be called normally. */
8127 return expand_call (exp, target, ignore);
8130 /* Determine whether a tree node represents a call to a built-in
8131 function. If the tree T is a call to a built-in function with
8132 the right number of arguments of the appropriate types, return
8133 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8134 Otherwise the return value is END_BUILTINS. */
8136 enum built_in_function
8137 builtin_mathfn_code (const_tree t)
8139 const_tree fndecl, arg, parmlist;
8140 const_tree argtype, parmtype;
8141 const_call_expr_arg_iterator iter;
8143 if (TREE_CODE (t) != CALL_EXPR)
8144 return END_BUILTINS;
8146 fndecl = get_callee_fndecl (t);
8147 if (fndecl == NULL_TREE
8148 || TREE_CODE (fndecl) != FUNCTION_DECL
8149 || ! DECL_BUILT_IN (fndecl)
8150 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8151 return END_BUILTINS;
8153 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8154 init_const_call_expr_arg_iterator (t, &iter);
8155 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8157 /* If a function doesn't take a variable number of arguments,
8158 the last element in the list will have type `void'. */
8159 parmtype = TREE_VALUE (parmlist);
8160 if (VOID_TYPE_P (parmtype))
8162 if (more_const_call_expr_args_p (&iter))
8163 return END_BUILTINS;
8164 return DECL_FUNCTION_CODE (fndecl);
8167 if (! more_const_call_expr_args_p (&iter))
8168 return END_BUILTINS;
8170 arg = next_const_call_expr_arg (&iter);
8171 argtype = TREE_TYPE (arg);
8173 if (SCALAR_FLOAT_TYPE_P (parmtype))
8175 if (! SCALAR_FLOAT_TYPE_P (argtype))
8176 return END_BUILTINS;
8178 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8180 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8181 return END_BUILTINS;
8183 else if (POINTER_TYPE_P (parmtype))
8185 if (! POINTER_TYPE_P (argtype))
8186 return END_BUILTINS;
8188 else if (INTEGRAL_TYPE_P (parmtype))
8190 if (! INTEGRAL_TYPE_P (argtype))
8191 return END_BUILTINS;
8193 else
8194 return END_BUILTINS;
8197 /* Variable-length argument list. */
8198 return DECL_FUNCTION_CODE (fndecl);
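/* For illustration, a typical caller (hypothetical) dispatches on the
   returned code, relying on the argument checking above to ensure the
   call is well formed; simplify_sqrt_call stands in for whatever
   transformation the caller performs:

     switch (builtin_mathfn_code (expr))
       {
       CASE_FLT_FN (BUILT_IN_SQRT):
         simplify_sqrt_call (expr);
         break;
       default:
         break;
       }
*/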
8201 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8202 evaluate to a constant. */
8204 static tree
8205 fold_builtin_constant_p (tree arg)
8207 /* We return 1 for a numeric type that's known to be a constant
8208 value at compile-time or for an aggregate type that's a
8209 literal constant. */
8210 STRIP_NOPS (arg);
8212 /* If we know this is a constant, return the constant one. */
8213 if (CONSTANT_CLASS_P (arg)
8214 || (TREE_CODE (arg) == CONSTRUCTOR
8215 && TREE_CONSTANT (arg)))
8216 return integer_one_node;
8217 if (TREE_CODE (arg) == ADDR_EXPR)
8219 tree op = TREE_OPERAND (arg, 0);
8220 if (TREE_CODE (op) == STRING_CST
8221 || (TREE_CODE (op) == ARRAY_REF
8222 && integer_zerop (TREE_OPERAND (op, 1))
8223 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8224 return integer_one_node;
8227 /* If this expression has side effects, show we don't know it to be a
8228 constant. Likewise if it's a pointer or aggregate type since in
8229 those cases we only want literals, since those are only optimized
8230 when generating RTL, not later.
8231 And finally, if we are compiling an initializer, not code, we
8232 need to return a definite result now; there's not going to be any
8233 more optimization done. */
8234 if (TREE_SIDE_EFFECTS (arg)
8235 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8236 || POINTER_TYPE_P (TREE_TYPE (arg))
8237 || cfun == 0
8238 || folding_initializer
8239 || force_folding_builtin_constant_p)
8240 return integer_zero_node;
8242 return NULL_TREE;
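/* Some illustrative outcomes of the logic above:

     __builtin_constant_p (42)      folds to 1 (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST)
     __builtin_constant_p (x++)     folds to 0 (side effects)
     __builtin_constant_p (x)       returns NULL_TREE here, deferring
                                    the answer to later folding.  */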
8245 /* Create builtin_expect or builtin_expect_with_probability
8246 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8247 The Fortran FE can also produce builtin_expect with PREDICTOR as its
8248 third argument; builtin_expect_with_probability instead uses its third
8249 argument as the PROBABILITY value. */
8251 static tree
8252 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8253 tree predictor, tree probability)
8255 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8257 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8258 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8259 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8260 ret_type = TREE_TYPE (TREE_TYPE (fn));
8261 pred_type = TREE_VALUE (arg_types);
8262 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8264 pred = fold_convert_loc (loc, pred_type, pred);
8265 expected = fold_convert_loc (loc, expected_type, expected);
8267 if (probability)
8268 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8269 else
8270 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8271 predictor);
8273 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8274 build_int_cst (ret_type, 0));
8277 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8278 NULL_TREE if no simplification is possible. */
8280 tree
8281 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8282 tree arg3)
8284 tree inner, fndecl, inner_arg0;
8285 enum tree_code code;
8287 /* Distribute the expected value over short-circuiting operators.
8288 See through the cast from truthvalue_type_node to long. */
8289 inner_arg0 = arg0;
8290 while (CONVERT_EXPR_P (inner_arg0)
8291 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8292 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8293 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8295 /* If this is a builtin_expect within a builtin_expect keep the
8296 inner one. See through a comparison against a constant. It
8297 might have been added to create a truthvalue. */
8298 inner = inner_arg0;
8300 if (COMPARISON_CLASS_P (inner)
8301 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8302 inner = TREE_OPERAND (inner, 0);
8304 if (TREE_CODE (inner) == CALL_EXPR
8305 && (fndecl = get_callee_fndecl (inner))
8306 && (DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL, BUILT_IN_EXPECT)
8307 || DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL,
8308 BUILT_IN_EXPECT_WITH_PROBABILITY)))
8309 return arg0;
8311 inner = inner_arg0;
8312 code = TREE_CODE (inner);
8313 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8315 tree op0 = TREE_OPERAND (inner, 0);
8316 tree op1 = TREE_OPERAND (inner, 1);
8317 arg1 = save_expr (arg1);
8319 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8320 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8321 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8323 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8326 /* If the argument isn't invariant then there's nothing else we can do. */
8327 if (!TREE_CONSTANT (inner_arg0))
8328 return NULL_TREE;
8330 /* If we expect that a comparison against the argument will fold to
8331 a constant return the constant. In practice, this means a true
8332 constant or the address of a non-weak symbol. */
8333 inner = inner_arg0;
8334 STRIP_NOPS (inner);
8335 if (TREE_CODE (inner) == ADDR_EXPR)
8339 inner = TREE_OPERAND (inner, 0);
8341 while (TREE_CODE (inner) == COMPONENT_REF
8342 || TREE_CODE (inner) == ARRAY_REF);
8343 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8344 return NULL_TREE;
8347 /* Otherwise, ARG0 already has the proper type for the return value. */
8348 return arg0;
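/* A worked example of the distribution above, in source terms:

     __builtin_expect (a && b, 1)

   is folded to the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm of the short-circuit carries its own prediction.  */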
8351 /* Fold a call to __builtin_classify_type with argument ARG. */
8353 static tree
8354 fold_builtin_classify_type (tree arg)
8356 if (arg == 0)
8357 return build_int_cst (integer_type_node, no_type_class);
8359 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8362 /* Fold a call to __builtin_strlen with argument ARG. */
8364 static tree
8365 fold_builtin_strlen (location_t loc, tree type, tree arg)
8367 if (!validate_arg (arg, POINTER_TYPE))
8368 return NULL_TREE;
8369 else
8371 tree len = c_strlen (arg, 0);
8373 if (len)
8374 return fold_convert_loc (loc, type, len);
8376 return NULL_TREE;
8380 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8382 static tree
8383 fold_builtin_inf (location_t loc, tree type, int warn)
8385 REAL_VALUE_TYPE real;
8387 /* __builtin_inff is intended to be usable to define INFINITY on all
8388 targets. If an infinity is not available, INFINITY expands "to a
8389 positive constant of type float that overflows at translation
8390 time", footnote "In this case, using INFINITY will violate the
8391 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8392 Thus we pedwarn to ensure this constraint violation is
8393 diagnosed. */
8394 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8395 pedwarn (loc, 0, "target format does not support infinity");
8397 real_inf (&real);
8398 return build_real (type, real);
8401 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8402 NULL_TREE if no simplification can be made. */
8404 static tree
8405 fold_builtin_sincos (location_t loc,
8406 tree arg0, tree arg1, tree arg2)
8408 tree type;
8409 tree fndecl, call = NULL_TREE;
8411 if (!validate_arg (arg0, REAL_TYPE)
8412 || !validate_arg (arg1, POINTER_TYPE)
8413 || !validate_arg (arg2, POINTER_TYPE))
8414 return NULL_TREE;
8416 type = TREE_TYPE (arg0);
8418 /* Calculate the result when the argument is a constant. */
8419 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8420 if (fn == END_BUILTINS)
8421 return NULL_TREE;
8423 /* Canonicalize sincos to cexpi. */
8424 if (TREE_CODE (arg0) == REAL_CST)
8426 tree complex_type = build_complex_type (type);
8427 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8429 if (!call)
8431 if (!targetm.libc_has_function (function_c99_math_complex)
8432 || !builtin_decl_implicit_p (fn))
8433 return NULL_TREE;
8434 fndecl = builtin_decl_explicit (fn);
8435 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8436 call = builtin_save_expr (call);
8439 tree ptype = build_pointer_type (type);
8440 arg1 = fold_convert (ptype, arg1);
8441 arg2 = fold_convert (ptype, arg2);
8442 return build2 (COMPOUND_EXPR, void_type_node,
8443 build2 (MODIFY_EXPR, void_type_node,
8444 build_fold_indirect_ref_loc (loc, arg1),
8445 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8446 build2 (MODIFY_EXPR, void_type_node,
8447 build_fold_indirect_ref_loc (loc, arg2),
8448 fold_build1_loc (loc, REALPART_EXPR, type, call)));
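/* In source terms the canonicalization above turns, illustratively,

     sincos (x, &s, &c);

   into the equivalent of

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   where cexpi names the internal builtin, letting later passes share
   one cexpi computation between sin (x) and cos (x) uses.  */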
8451 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8452 Return NULL_TREE if no simplification can be made. */
8454 static tree
8455 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8457 if (!validate_arg (arg1, POINTER_TYPE)
8458 || !validate_arg (arg2, POINTER_TYPE)
8459 || !validate_arg (len, INTEGER_TYPE))
8460 return NULL_TREE;
8462 /* If the LEN parameter is zero, return zero. */
8463 if (integer_zerop (len))
8464 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8465 arg1, arg2);
8467 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8468 if (operand_equal_p (arg1, arg2, 0))
8469 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8471 /* If the LEN parameter is one, return an expression corresponding to
8472 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8473 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8475 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8476 tree cst_uchar_ptr_node
8477 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8479 tree ind1
8480 = fold_convert_loc (loc, integer_type_node,
8481 build1 (INDIRECT_REF, cst_uchar_node,
8482 fold_convert_loc (loc,
8483 cst_uchar_ptr_node,
8484 arg1)));
8485 tree ind2
8486 = fold_convert_loc (loc, integer_type_node,
8487 build1 (INDIRECT_REF, cst_uchar_node,
8488 fold_convert_loc (loc,
8489 cst_uchar_ptr_node,
8490 arg2)));
8491 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8494 return NULL_TREE;
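/* Illustrative results of the folds above:

     memcmp (p, q, 0)   folds to 0, evaluating p and q for side effects
     memcmp (p, p, n)   folds to 0, evaluating n for side effects
     memcmp (p, q, 1)   folds to *(const unsigned char *) p
                        - *(const unsigned char *) q  */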
8497 /* Fold a call to builtin isascii with argument ARG. */
8499 static tree
8500 fold_builtin_isascii (location_t loc, tree arg)
8502 if (!validate_arg (arg, INTEGER_TYPE))
8503 return NULL_TREE;
8504 else
8506 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8507 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8508 build_int_cst (integer_type_node,
8509 ~ (unsigned HOST_WIDE_INT) 0x7f));
8510 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8511 arg, integer_zero_node);
8515 /* Fold a call to builtin toascii with argument ARG. */
8517 static tree
8518 fold_builtin_toascii (location_t loc, tree arg)
8520 if (!validate_arg (arg, INTEGER_TYPE))
8521 return NULL_TREE;
8523 /* Transform toascii(c) -> (c & 0x7f). */
8524 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8525 build_int_cst (integer_type_node, 0x7f));
8528 /* Fold a call to builtin isdigit with argument ARG. */
8530 static tree
8531 fold_builtin_isdigit (location_t loc, tree arg)
8533 if (!validate_arg (arg, INTEGER_TYPE))
8534 return NULL_TREE;
8535 else
8537 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8538 /* According to the C standard, isdigit is unaffected by locale.
8539 However, it definitely is affected by the target character set. */
8540 unsigned HOST_WIDE_INT target_digit0
8541 = lang_hooks.to_target_charset ('0');
8543 if (target_digit0 == 0)
8544 return NULL_TREE;
8546 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8547 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8548 build_int_cst (unsigned_type_node, target_digit0));
8549 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8550 build_int_cst (unsigned_type_node, 9));
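/* The single comparison suffices because the subtraction is performed
   in unsigned arithmetic: for any c below '0', (unsigned) c - '0'
   wraps around to a large value, so one LE_EXPR test replaces the
   usual pair of range checks '0' <= c && c <= '9'.  */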
8554 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8556 static tree
8557 fold_builtin_fabs (location_t loc, tree arg, tree type)
8559 if (!validate_arg (arg, REAL_TYPE))
8560 return NULL_TREE;
8562 arg = fold_convert_loc (loc, type, arg);
8563 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8566 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8568 static tree
8569 fold_builtin_abs (location_t loc, tree arg, tree type)
8571 if (!validate_arg (arg, INTEGER_TYPE))
8572 return NULL_TREE;
8574 arg = fold_convert_loc (loc, type, arg);
8575 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8578 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8580 static tree
8581 fold_builtin_carg (location_t loc, tree arg, tree type)
8583 if (validate_arg (arg, COMPLEX_TYPE)
8584 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8586 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8588 if (atan2_fn)
8590 tree new_arg = builtin_save_expr (arg);
8591 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8592 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8593 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8597 return NULL_TREE;
8600 /* Fold a call to builtin frexp, we can assume the base is 2. */
8602 static tree
8603 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8605 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8606 return NULL_TREE;
8608 STRIP_NOPS (arg0);
8610 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8611 return NULL_TREE;
8613 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8615 /* Proceed if a valid pointer type was passed in. */
8616 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8618 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8619 tree frac, exp;
8621 switch (value->cl)
8623 case rvc_zero:
8624 /* For +-0, return (*exp = 0, +-0). */
8625 exp = integer_zero_node;
8626 frac = arg0;
8627 break;
8628 case rvc_nan:
8629 case rvc_inf:
8630 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8631 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8632 case rvc_normal:
8634 /* Since the frexp function always expects base 2, and in
8635 GCC normalized significands are already in the range
8636 [0.5, 1.0), we have exactly what frexp wants. */
8637 REAL_VALUE_TYPE frac_rvt = *value;
8638 SET_REAL_EXP (&frac_rvt, 0);
8639 frac = build_real (rettype, frac_rvt);
8640 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8642 break;
8643 default:
8644 gcc_unreachable ();
8647 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8648 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8649 TREE_SIDE_EFFECTS (arg1) = 1;
8650 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8653 return NULL_TREE;
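/* A worked example of the rvc_normal case: for arg0 == 8.0 the
   normalized significand is 0.5 with exponent 4 (8.0 == 0.5 * 2**4),
   so the call folds to (*arg1 = 4, 0.5).  */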
8656 /* Fold a call to builtin modf. */
8658 static tree
8659 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8661 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8662 return NULL_TREE;
8664 STRIP_NOPS (arg0);
8666 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8667 return NULL_TREE;
8669 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8671 /* Proceed if a valid pointer type was passed in. */
8672 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8674 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8675 REAL_VALUE_TYPE trunc, frac;
8677 switch (value->cl)
8679 case rvc_nan:
8680 case rvc_zero:
8681 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8682 trunc = frac = *value;
8683 break;
8684 case rvc_inf:
8685 /* For +-Inf, return (*arg1 = arg0, +-0). */
8686 frac = dconst0;
8687 frac.sign = value->sign;
8688 trunc = *value;
8689 break;
8690 case rvc_normal:
8691 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8692 real_trunc (&trunc, VOIDmode, value);
8693 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8694 /* If the original number was negative and already
8695 integral, then the fractional part is -0.0. */
8696 if (value->sign && frac.cl == rvc_zero)
8697 frac.sign = value->sign;
8698 break;
8701 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8702 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8703 build_real (rettype, trunc));
8704 TREE_SIDE_EFFECTS (arg1) = 1;
8705 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8706 build_real (rettype, frac));
8709 return NULL_TREE;
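/* Worked examples of the cases above:

     modf (3.75, &i)   yields i == 3.0 and returns 0.75
     modf (-2.0, &i)   yields i == -2.0 and returns -0.0, the
                       negative-zero special case noted in rvc_normal.  */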
8712 /* Given a location LOC, an interclass builtin function decl FNDECL
8713 and its single argument ARG, return a folded expression computing
8714 the same, or NULL_TREE if we either couldn't or didn't want to fold
8715 (the latter happens if there's an RTL instruction available). */
8717 static tree
8718 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8720 machine_mode mode;
8722 if (!validate_arg (arg, REAL_TYPE))
8723 return NULL_TREE;
8725 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8726 return NULL_TREE;
8728 mode = TYPE_MODE (TREE_TYPE (arg));
8730 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8732 /* If there is no optab, try generic code. */
8733 switch (DECL_FUNCTION_CODE (fndecl))
8735 tree result;
8737 CASE_FLT_FN (BUILT_IN_ISINF):
8739 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8740 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8741 tree type = TREE_TYPE (arg);
8742 REAL_VALUE_TYPE r;
8743 char buf[128];
8745 if (is_ibm_extended)
8747 /* NaN and Inf are encoded in the high-order double value
8748 only. The low-order value is not significant. */
8749 type = double_type_node;
8750 mode = DFmode;
8751 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8753 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8754 real_from_string (&r, buf);
8755 result = build_call_expr (isgr_fn, 2,
8756 fold_build1_loc (loc, ABS_EXPR, type, arg),
8757 build_real (type, r));
8758 return result;
8760 CASE_FLT_FN (BUILT_IN_FINITE):
8761 case BUILT_IN_ISFINITE:
8763 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8764 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8765 tree type = TREE_TYPE (arg);
8766 REAL_VALUE_TYPE r;
8767 char buf[128];
8769 if (is_ibm_extended)
8771 /* NaN and Inf are encoded in the high-order double value
8772 only. The low-order value is not significant. */
8773 type = double_type_node;
8774 mode = DFmode;
8775 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8777 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8778 real_from_string (&r, buf);
8779 result = build_call_expr (isle_fn, 2,
8780 fold_build1_loc (loc, ABS_EXPR, type, arg),
8781 build_real (type, r));
8782 /*result = fold_build2_loc (loc, UNGT_EXPR,
8783 TREE_TYPE (TREE_TYPE (fndecl)),
8784 fold_build1_loc (loc, ABS_EXPR, type, arg),
8785 build_real (type, r));
8786 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8787 TREE_TYPE (TREE_TYPE (fndecl)),
8788 result);*/
8789 return result;
8791 case BUILT_IN_ISNORMAL:
8793 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8794 islessequal(fabs(x),DBL_MAX). */
8795 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8796 tree type = TREE_TYPE (arg);
8797 tree orig_arg, max_exp, min_exp;
8798 machine_mode orig_mode = mode;
8799 REAL_VALUE_TYPE rmax, rmin;
8800 char buf[128];
8802 orig_arg = arg = builtin_save_expr (arg);
8803 if (is_ibm_extended)
8805 /* Use double to test the normal range of IBM extended
8806 precision. Emin for IBM extended precision is
8807 different to emin for IEEE double, being 53 higher
8808 since the low double exponent is at least 53 lower
8809 than the high double exponent. */
8810 type = double_type_node;
8811 mode = DFmode;
8812 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8814 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8816 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8817 real_from_string (&rmax, buf);
8818 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8819 real_from_string (&rmin, buf);
8820 max_exp = build_real (type, rmax);
8821 min_exp = build_real (type, rmin);
8823 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8824 if (is_ibm_extended)
8826 /* Testing the high end of the range is done just using
8827 the high double, using the same test as isfinite().
8828 For the subnormal end of the range we first test the
8829 high double, then if its magnitude is equal to the
8830 limit of 0x1p-969, we test whether the low double is
8831 non-zero and opposite sign to the high double. */
8832 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8833 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8834 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8835 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8836 arg, min_exp);
8837 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8838 complex_double_type_node, orig_arg);
8839 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8840 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8841 tree zero = build_real (type, dconst0);
8842 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8843 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8844 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8845 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8846 fold_build3 (COND_EXPR,
8847 integer_type_node,
8848 hilt, logt, lolt));
8849 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8850 eq_min, ok_lo);
8851 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8852 gt_min, eq_min);
8854 else
8856 tree const isge_fn
8857 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8858 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8860 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8861 max_exp, min_exp);
8862 return result;
8864 default:
8865 break;
8868 return NULL_TREE;
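/* Minimal user-level sketches of the two folds above, assuming IEEE
   double and <float.h> (illustration only, not part of this file).
   For isfinite the quiet comparison is false for NaN and +/-Inf; for
   isnormal, DBL_MIN is 0x1p-1022, i.e. the "0x1p%d" string built from
   emin - 1, and the single '&' matches the BIT_AND_EXPR used above.  */
#include <float.h>
static int
isfinite_folded (double x)
{
  return __builtin_islessequal (__builtin_fabs (x), DBL_MAX);
}
static int
isnormal_folded (double x)
{
  double ax = __builtin_fabs (x);
  return __builtin_isgreaterequal (ax, DBL_MIN)
         & __builtin_islessequal (ax, DBL_MAX);
}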
8871 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
8872 ARG is the argument for the call. */
8874 static tree
8875 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8877 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8879 if (!validate_arg (arg, REAL_TYPE))
8880 return NULL_TREE;
8882 switch (builtin_index)
8884 case BUILT_IN_ISINF:
8885 if (!HONOR_INFINITIES (arg))
8886 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8888 return NULL_TREE;
8890 case BUILT_IN_ISINF_SIGN:
8892 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8893 /* In a boolean context, GCC will fold the inner COND_EXPR to
8894 1. So e.g. "if (isinf_sign(x))" would be folded to just
8895 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8896 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8897 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8898 tree tmp = NULL_TREE;
8900 arg = builtin_save_expr (arg);
8902 if (signbit_fn && isinf_fn)
8904 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8905 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8907 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8908 signbit_call, integer_zero_node);
8909 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8910 isinf_call, integer_zero_node);
8912 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8913 integer_minus_one_node, integer_one_node);
8914 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8915 isinf_call, tmp,
8916 integer_zero_node);
8919 return tmp;
8922 case BUILT_IN_ISFINITE:
8923 if (!HONOR_NANS (arg)
8924 && !HONOR_INFINITIES (arg))
8925 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8927 return NULL_TREE;
8929 case BUILT_IN_ISNAN:
8930 if (!HONOR_NANS (arg))
8931 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8934 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8935 if (is_ibm_extended)
8937 /* NaN and Inf are encoded in the high-order double value
8938 only. The low-order value is not significant. */
8939 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8942 arg = builtin_save_expr (arg);
8943 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8945 default:
8946 gcc_unreachable ();
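/* A sketch of the isinf_sign expression built above (illustration
   only, assuming the signbit and isinf decls are available):  */
static int
isinf_sign_folded (double x)
{
  return __builtin_isinf (x)
         ? (__builtin_signbit (x) ? -1 : 1)
         : 0;
}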
8950 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8951 This builtin will generate code to return the appropriate floating
8952 point classification depending on the value of the floating point
8953 number passed in. The possible return values must be supplied as
8954 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8955 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8956 one floating point argument which is "type generic". */
8958 static tree
8959 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8961 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8962 arg, type, res, tmp;
8963 machine_mode mode;
8964 REAL_VALUE_TYPE r;
8965 char buf[128];
8967 /* Verify the required arguments in the original call. */
8968 if (nargs != 6
8969 || !validate_arg (args[0], INTEGER_TYPE)
8970 || !validate_arg (args[1], INTEGER_TYPE)
8971 || !validate_arg (args[2], INTEGER_TYPE)
8972 || !validate_arg (args[3], INTEGER_TYPE)
8973 || !validate_arg (args[4], INTEGER_TYPE)
8974 || !validate_arg (args[5], REAL_TYPE))
8975 return NULL_TREE;
8977 fp_nan = args[0];
8978 fp_infinite = args[1];
8979 fp_normal = args[2];
8980 fp_subnormal = args[3];
8981 fp_zero = args[4];
8982 arg = args[5];
8983 type = TREE_TYPE (arg);
8984 mode = TYPE_MODE (type);
8985 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8987 /* fpclassify(x) ->
8988 isnan(x) ? FP_NAN :
8989 (fabs(x) == Inf ? FP_INFINITE :
8990 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8991 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8993 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8994 build_real (type, dconst0));
8995 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8996 tmp, fp_zero, fp_subnormal);
8998 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8999 real_from_string (&r, buf);
9000 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9001 arg, build_real (type, r));
9002 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9004 if (HONOR_INFINITIES (mode))
9006 real_inf (&r);
9007 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9008 build_real (type, r));
9009 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9010 fp_infinite, res);
9013 if (HONOR_NANS (mode))
9015 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9016 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9019 return res;
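/* A sketch of the decision chain built above, assuming IEEE double and
   the FP_* macros from <math.h> (the code above builds it innermost
   first, but it reads naturally outermost first):  */
#include <math.h>
#include <float.h>
static int
fpclassify_folded (double x)
{
  double ax = __builtin_fabs (x);
  return __builtin_isnan (x) ? FP_NAN
         : ax == __builtin_inf () ? FP_INFINITE
         : ax >= DBL_MIN ? FP_NORMAL
         : ax == 0.0 ? FP_ZERO
         : FP_SUBNORMAL;
}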
9022 /* Fold a call to an unordered comparison function such as
9023 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9024 being called and ARG0 and ARG1 are the arguments for the call.
9025 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9026 the opposite of the desired result. UNORDERED_CODE is used
9027 for modes that can hold NaNs and ORDERED_CODE is used for
9028 the rest. */
9030 static tree
9031 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9032 enum tree_code unordered_code,
9033 enum tree_code ordered_code)
9035 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9036 enum tree_code code;
9037 tree type0, type1;
9038 enum tree_code code0, code1;
9039 tree cmp_type = NULL_TREE;
9041 type0 = TREE_TYPE (arg0);
9042 type1 = TREE_TYPE (arg1);
9044 code0 = TREE_CODE (type0);
9045 code1 = TREE_CODE (type1);
9047 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9048 /* Choose the wider of two real types. */
9049 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9050 ? type0 : type1;
9051 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9052 cmp_type = type0;
9053 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9054 cmp_type = type1;
9056 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9057 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9059 if (unordered_code == UNORDERED_EXPR)
9061 if (!HONOR_NANS (arg0))
9062 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9063 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9066 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9067 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9068 fold_build2_loc (loc, code, type, arg0, arg1));
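/* A sketch for one caller below, assuming IEEE semantics:
   isgreater (x, y) folds to the negation of its unordered dual UNLE,
   i.e. "not (NaN or x <= y)", raising no invalid exception on quiet
   NaNs:  */
static int
isgreater_folded (double x, double y)
{
  return !(__builtin_isunordered (x, y)
           || __builtin_islessequal (x, y));
}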
9071 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9072 arithmetics if it can never overflow, or into internal functions that
9073 return both result of arithmetics and overflowed boolean flag in
9074 a complex integer result, or some other check for overflow.
9075 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9076 checking part of that. */
9078 static tree
9079 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9080 tree arg0, tree arg1, tree arg2)
9082 enum internal_fn ifn = IFN_LAST;
9083 /* The code of the expression corresponding to the type-generic
9084 built-in, or ERROR_MARK for the type-specific ones. */
9085 enum tree_code opcode = ERROR_MARK;
9086 bool ovf_only = false;
9088 switch (fcode)
9090 case BUILT_IN_ADD_OVERFLOW_P:
9091 ovf_only = true;
9092 /* FALLTHRU */
9093 case BUILT_IN_ADD_OVERFLOW:
9094 opcode = PLUS_EXPR;
9095 /* FALLTHRU */
9096 case BUILT_IN_SADD_OVERFLOW:
9097 case BUILT_IN_SADDL_OVERFLOW:
9098 case BUILT_IN_SADDLL_OVERFLOW:
9099 case BUILT_IN_UADD_OVERFLOW:
9100 case BUILT_IN_UADDL_OVERFLOW:
9101 case BUILT_IN_UADDLL_OVERFLOW:
9102 ifn = IFN_ADD_OVERFLOW;
9103 break;
9104 case BUILT_IN_SUB_OVERFLOW_P:
9105 ovf_only = true;
9106 /* FALLTHRU */
9107 case BUILT_IN_SUB_OVERFLOW:
9108 opcode = MINUS_EXPR;
9109 /* FALLTHRU */
9110 case BUILT_IN_SSUB_OVERFLOW:
9111 case BUILT_IN_SSUBL_OVERFLOW:
9112 case BUILT_IN_SSUBLL_OVERFLOW:
9113 case BUILT_IN_USUB_OVERFLOW:
9114 case BUILT_IN_USUBL_OVERFLOW:
9115 case BUILT_IN_USUBLL_OVERFLOW:
9116 ifn = IFN_SUB_OVERFLOW;
9117 break;
9118 case BUILT_IN_MUL_OVERFLOW_P:
9119 ovf_only = true;
9120 /* FALLTHRU */
9121 case BUILT_IN_MUL_OVERFLOW:
9122 opcode = MULT_EXPR;
9123 /* FALLTHRU */
9124 case BUILT_IN_SMUL_OVERFLOW:
9125 case BUILT_IN_SMULL_OVERFLOW:
9126 case BUILT_IN_SMULLL_OVERFLOW:
9127 case BUILT_IN_UMUL_OVERFLOW:
9128 case BUILT_IN_UMULL_OVERFLOW:
9129 case BUILT_IN_UMULLL_OVERFLOW:
9130 ifn = IFN_MUL_OVERFLOW;
9131 break;
9132 default:
9133 gcc_unreachable ();
9136 /* For the "generic" overloads, the first two arguments can have different
9137 types and the last argument determines the target type to use to check
9138 for overflow. The arguments of the other overloads all have the same
9139 type. */
9140 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9142 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9143 arguments are constant, attempt to fold the built-in call into a constant
9144 expression indicating whether or not it detected an overflow. */
9145 if (ovf_only
9146 && TREE_CODE (arg0) == INTEGER_CST
9147 && TREE_CODE (arg1) == INTEGER_CST)
9148 /* Perform the computation in the target type and check for overflow. */
9149 return omit_one_operand_loc (loc, boolean_type_node,
9150 arith_overflowed_p (opcode, type, arg0, arg1)
9151 ? boolean_true_node : boolean_false_node,
9152 arg2);
9154 tree ctype = build_complex_type (type);
9155 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9156 2, arg0, arg1);
9157 tree tgt = save_expr (call);
9158 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9159 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9160 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9162 if (ovf_only)
9163 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9165 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9166 tree store
9167 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9168 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
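/* A usage sketch of the two flavors folded above (illustration only):
   the type-generic form stores the wrapped result through the pointer
   and returns the overflow flag; the _p form only reports the flag and
   never writes.  */
static int
add_checked (int a, int b, int *out)
{
  if (__builtin_add_overflow (a, b, out))
    return -1;  /* overflowed; *out holds the wrapped result */
  return 0;
}
static _Bool
would_overflow_in_int (long a, long b)
{
  /* The third argument only supplies the type to check against.  */
  return __builtin_add_overflow_p (a, b, (int) 0);
}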
9171 /* Fold a call to __builtin_FILE to a constant string. */
9173 static inline tree
9174 fold_builtin_FILE (location_t loc)
9176 if (const char *fname = LOCATION_FILE (loc))
9178 /* The documentation says this builtin is equivalent to the preprocessor
9179 __FILE__ macro so it appears appropriate to use the same file prefix
9180 mappings. */
9181 fname = remap_macro_filename (fname);
9182 return build_string_literal (strlen (fname) + 1, fname);
9185 return build_string_literal (1, "");
9188 /* Fold a call to __builtin_FUNCTION to a constant string. */
9190 static inline tree
9191 fold_builtin_FUNCTION ()
9193 const char *name = "";
9195 if (current_function_decl)
9196 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9198 return build_string_literal (strlen (name) + 1, name);
9201 /* Fold a call to __builtin_LINE to an integer constant. */
9203 static inline tree
9204 fold_builtin_LINE (location_t loc, tree type)
9206 return build_int_cst (type, LOCATION_LINE (loc));
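/* A usage sketch (log_at is a hypothetical logging function): unlike
   the __FILE__/__LINE__ macros, these builtins fold at the location of
   the call expression, so they report the caller's position when used
   e.g. in a C++ default argument.  */
#define LOG(msg) \
  log_at (__builtin_FILE (), __builtin_LINE (), __builtin_FUNCTION (), (msg))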
9209 /* Fold a call to built-in function FNDECL with 0 arguments.
9210 This function returns NULL_TREE if no simplification was possible. */
9212 static tree
9213 fold_builtin_0 (location_t loc, tree fndecl)
9215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9216 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9217 switch (fcode)
9219 case BUILT_IN_FILE:
9220 return fold_builtin_FILE (loc);
9222 case BUILT_IN_FUNCTION:
9223 return fold_builtin_FUNCTION ();
9225 case BUILT_IN_LINE:
9226 return fold_builtin_LINE (loc, type);
9228 CASE_FLT_FN (BUILT_IN_INF):
9229 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9230 case BUILT_IN_INFD32:
9231 case BUILT_IN_INFD64:
9232 case BUILT_IN_INFD128:
9233 return fold_builtin_inf (loc, type, true);
9235 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9236 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9237 return fold_builtin_inf (loc, type, false);
9239 case BUILT_IN_CLASSIFY_TYPE:
9240 return fold_builtin_classify_type (NULL_TREE);
9242 default:
9243 break;
9245 return NULL_TREE;
9248 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9249 This function returns NULL_TREE if no simplification was possible. */
9251 static tree
9252 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9254 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9255 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9257 if (TREE_CODE (arg0) == ERROR_MARK)
9258 return NULL_TREE;
9260 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9261 return ret;
9263 switch (fcode)
9265 case BUILT_IN_CONSTANT_P:
9267 tree val = fold_builtin_constant_p (arg0);
9269 /* Gimplification will pull the CALL_EXPR for the builtin out of
9270 an if condition. When not optimizing, we'll not CSE it back.
9271 To avoid regressions such as link errors, return false now.
9272 if (!val && !optimize)
9273 val = integer_zero_node;
9275 return val;
9278 case BUILT_IN_CLASSIFY_TYPE:
9279 return fold_builtin_classify_type (arg0);
9281 case BUILT_IN_STRLEN:
9282 return fold_builtin_strlen (loc, type, arg0);
9284 CASE_FLT_FN (BUILT_IN_FABS):
9285 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9286 case BUILT_IN_FABSD32:
9287 case BUILT_IN_FABSD64:
9288 case BUILT_IN_FABSD128:
9289 return fold_builtin_fabs (loc, arg0, type);
9291 case BUILT_IN_ABS:
9292 case BUILT_IN_LABS:
9293 case BUILT_IN_LLABS:
9294 case BUILT_IN_IMAXABS:
9295 return fold_builtin_abs (loc, arg0, type);
9297 CASE_FLT_FN (BUILT_IN_CONJ):
9298 if (validate_arg (arg0, COMPLEX_TYPE)
9299 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9300 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9301 break;
9303 CASE_FLT_FN (BUILT_IN_CREAL):
9304 if (validate_arg (arg0, COMPLEX_TYPE)
9305 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9306 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9307 break;
9309 CASE_FLT_FN (BUILT_IN_CIMAG):
9310 if (validate_arg (arg0, COMPLEX_TYPE)
9311 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9312 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9313 break;
9315 CASE_FLT_FN (BUILT_IN_CARG):
9316 return fold_builtin_carg (loc, arg0, type);
9318 case BUILT_IN_ISASCII:
9319 return fold_builtin_isascii (loc, arg0);
9321 case BUILT_IN_TOASCII:
9322 return fold_builtin_toascii (loc, arg0);
9324 case BUILT_IN_ISDIGIT:
9325 return fold_builtin_isdigit (loc, arg0);
9327 CASE_FLT_FN (BUILT_IN_FINITE):
9328 case BUILT_IN_FINITED32:
9329 case BUILT_IN_FINITED64:
9330 case BUILT_IN_FINITED128:
9331 case BUILT_IN_ISFINITE:
9333 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9334 if (ret)
9335 return ret;
9336 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9339 CASE_FLT_FN (BUILT_IN_ISINF):
9340 case BUILT_IN_ISINFD32:
9341 case BUILT_IN_ISINFD64:
9342 case BUILT_IN_ISINFD128:
9344 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9345 if (ret)
9346 return ret;
9347 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9350 case BUILT_IN_ISNORMAL:
9351 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9353 case BUILT_IN_ISINF_SIGN:
9354 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9356 CASE_FLT_FN (BUILT_IN_ISNAN):
9357 case BUILT_IN_ISNAND32:
9358 case BUILT_IN_ISNAND64:
9359 case BUILT_IN_ISNAND128:
9360 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9362 case BUILT_IN_FREE:
9363 if (integer_zerop (arg0))
9364 return build_empty_stmt (loc);
9365 break;
9367 default:
9368 break;
9371 return NULL_TREE;
9375 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9376 This function returns NULL_TREE if no simplification was possible. */
9378 static tree
9379 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9381 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9382 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9384 if (TREE_CODE (arg0) == ERROR_MARK
9385 || TREE_CODE (arg1) == ERROR_MARK)
9386 return NULL_TREE;
9388 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9389 return ret;
9391 switch (fcode)
9393 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9394 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9395 if (validate_arg (arg0, REAL_TYPE)
9396 && validate_arg (arg1, POINTER_TYPE))
9397 return do_mpfr_lgamma_r (arg0, arg1, type);
9398 break;
9400 CASE_FLT_FN (BUILT_IN_FREXP):
9401 return fold_builtin_frexp (loc, arg0, arg1, type);
9403 CASE_FLT_FN (BUILT_IN_MODF):
9404 return fold_builtin_modf (loc, arg0, arg1, type);
9406 case BUILT_IN_STRSPN:
9407 return fold_builtin_strspn (loc, arg0, arg1);
9409 case BUILT_IN_STRCSPN:
9410 return fold_builtin_strcspn (loc, arg0, arg1);
9412 case BUILT_IN_STRPBRK:
9413 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9415 case BUILT_IN_EXPECT:
9416 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9418 case BUILT_IN_ISGREATER:
9419 return fold_builtin_unordered_cmp (loc, fndecl,
9420 arg0, arg1, UNLE_EXPR, LE_EXPR);
9421 case BUILT_IN_ISGREATEREQUAL:
9422 return fold_builtin_unordered_cmp (loc, fndecl,
9423 arg0, arg1, UNLT_EXPR, LT_EXPR);
9424 case BUILT_IN_ISLESS:
9425 return fold_builtin_unordered_cmp (loc, fndecl,
9426 arg0, arg1, UNGE_EXPR, GE_EXPR);
9427 case BUILT_IN_ISLESSEQUAL:
9428 return fold_builtin_unordered_cmp (loc, fndecl,
9429 arg0, arg1, UNGT_EXPR, GT_EXPR);
9430 case BUILT_IN_ISLESSGREATER:
9431 return fold_builtin_unordered_cmp (loc, fndecl,
9432 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9433 case BUILT_IN_ISUNORDERED:
9434 return fold_builtin_unordered_cmp (loc, fndecl,
9435 arg0, arg1, UNORDERED_EXPR,
9436 NOP_EXPR);
9438 /* We do the folding for va_start in the expander. */
9439 case BUILT_IN_VA_START:
9440 break;
9442 case BUILT_IN_OBJECT_SIZE:
9443 return fold_builtin_object_size (arg0, arg1);
9445 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9446 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9448 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9449 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9451 default:
9452 break;
9454 return NULL_TREE;
9457 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9458 and ARG2.
9459 This function returns NULL_TREE if no simplification was possible. */
9461 static tree
9462 fold_builtin_3 (location_t loc, tree fndecl,
9463 tree arg0, tree arg1, tree arg2)
9465 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9466 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9468 if (TREE_CODE (arg0) == ERROR_MARK
9469 || TREE_CODE (arg1) == ERROR_MARK
9470 || TREE_CODE (arg2) == ERROR_MARK)
9471 return NULL_TREE;
9473 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9474 arg0, arg1, arg2))
9475 return ret;
9477 switch (fcode)
9480 CASE_FLT_FN (BUILT_IN_SINCOS):
9481 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9483 CASE_FLT_FN (BUILT_IN_REMQUO):
9484 if (validate_arg (arg0, REAL_TYPE)
9485 && validate_arg (arg1, REAL_TYPE)
9486 && validate_arg (arg2, POINTER_TYPE))
9487 return do_mpfr_remquo (arg0, arg1, arg2);
9488 break;
9490 case BUILT_IN_MEMCMP:
9491 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9493 case BUILT_IN_EXPECT:
9494 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9496 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9497 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9499 case BUILT_IN_ADD_OVERFLOW:
9500 case BUILT_IN_SUB_OVERFLOW:
9501 case BUILT_IN_MUL_OVERFLOW:
9502 case BUILT_IN_ADD_OVERFLOW_P:
9503 case BUILT_IN_SUB_OVERFLOW_P:
9504 case BUILT_IN_MUL_OVERFLOW_P:
9505 case BUILT_IN_SADD_OVERFLOW:
9506 case BUILT_IN_SADDL_OVERFLOW:
9507 case BUILT_IN_SADDLL_OVERFLOW:
9508 case BUILT_IN_SSUB_OVERFLOW:
9509 case BUILT_IN_SSUBL_OVERFLOW:
9510 case BUILT_IN_SSUBLL_OVERFLOW:
9511 case BUILT_IN_SMUL_OVERFLOW:
9512 case BUILT_IN_SMULL_OVERFLOW:
9513 case BUILT_IN_SMULLL_OVERFLOW:
9514 case BUILT_IN_UADD_OVERFLOW:
9515 case BUILT_IN_UADDL_OVERFLOW:
9516 case BUILT_IN_UADDLL_OVERFLOW:
9517 case BUILT_IN_USUB_OVERFLOW:
9518 case BUILT_IN_USUBL_OVERFLOW:
9519 case BUILT_IN_USUBLL_OVERFLOW:
9520 case BUILT_IN_UMUL_OVERFLOW:
9521 case BUILT_IN_UMULL_OVERFLOW:
9522 case BUILT_IN_UMULLL_OVERFLOW:
9523 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9525 default:
9526 break;
9528 return NULL_TREE;
9531 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9532 arguments. IGNORE is true if the result of the
9533 function call is ignored. This function returns NULL_TREE if no
9534 simplification was possible. */
9536 tree
9537 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9539 tree ret = NULL_TREE;
9541 switch (nargs)
9543 case 0:
9544 ret = fold_builtin_0 (loc, fndecl);
9545 break;
9546 case 1:
9547 ret = fold_builtin_1 (loc, fndecl, args[0]);
9548 break;
9549 case 2:
9550 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9551 break;
9552 case 3:
9553 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9554 break;
9555 default:
9556 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9557 break;
9559 if (ret)
9561 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9562 SET_EXPR_LOCATION (ret, loc);
9563 return ret;
9565 return NULL_TREE;
9568 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9569 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9570 of arguments in ARGS to be omitted. OLDNARGS is the number of
9571 elements in ARGS. */
9573 static tree
9574 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9575 int skip, tree fndecl, int n, va_list newargs)
9577 int nargs = oldnargs - skip + n;
9578 tree *buffer;
9580 if (n > 0)
9582 int i, j;
9584 buffer = XALLOCAVEC (tree, nargs);
9585 for (i = 0; i < n; i++)
9586 buffer[i] = va_arg (newargs, tree);
9587 for (j = skip; j < oldnargs; j++, i++)
9588 buffer[i] = args[j];
9590 else
9591 buffer = args + skip;
9593 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9596 /* Return true if FNDECL shouldn't be folded right now.
9597 If a built-in function has an inline attribute always_inline
9598 wrapper, defer folding it after always_inline functions have
9599 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9600 might not be performed. */
9602 bool
9603 avoid_folding_inline_builtin (tree fndecl)
9605 return (DECL_DECLARED_INLINE_P (fndecl)
9606 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9607 && cfun
9608 && !cfun->always_inline_functions_inlined
9609 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
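/* A sketch of the pattern this protects, modeled on the glibc-style
   fortify wrappers (names illustrative): folding the builtin before
   the wrapper is inlined would bypass the object-size check that
   -D_FORTIFY_SOURCE exists to insert.  */
extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
char *
strcpy_fortified (char *d, const char *s)
{
  return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
}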
9612 /* A wrapper function for builtin folding that prevents warnings for
9613 "statement without effect" and the like, caused by removing the
9614 call node earlier than the warning is generated. */
9616 tree
9617 fold_call_expr (location_t loc, tree exp, bool ignore)
9619 tree ret = NULL_TREE;
9620 tree fndecl = get_callee_fndecl (exp);
9621 if (fndecl
9622 && TREE_CODE (fndecl) == FUNCTION_DECL
9623 && DECL_BUILT_IN (fndecl)
9624 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9625 yet. Defer folding until we see all the arguments
9626 (after inlining). */
9627 && !CALL_EXPR_VA_ARG_PACK (exp))
9629 int nargs = call_expr_nargs (exp);
9631 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9632 instead last argument is __builtin_va_arg_pack (). Defer folding
9633 even in that case, until arguments are finalized. */
9634 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9636 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9637 if (fndecl2
9638 && TREE_CODE (fndecl2) == FUNCTION_DECL
9639 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9640 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9641 return NULL_TREE;
9644 if (avoid_folding_inline_builtin (fndecl))
9645 return NULL_TREE;
9647 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9648 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9649 CALL_EXPR_ARGP (exp), ignore);
9650 else
9652 tree *args = CALL_EXPR_ARGP (exp);
9653 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9654 if (ret)
9655 return ret;
9658 return NULL_TREE;
9661 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9662 N arguments are passed in the array ARGARRAY. Return a folded
9663 expression or NULL_TREE if no simplification was possible. */
9665 tree
9666 fold_builtin_call_array (location_t loc, tree,
9667 tree fn,
9668 int n,
9669 tree *argarray)
9671 if (TREE_CODE (fn) != ADDR_EXPR)
9672 return NULL_TREE;
9674 tree fndecl = TREE_OPERAND (fn, 0);
9675 if (TREE_CODE (fndecl) == FUNCTION_DECL
9676 && DECL_BUILT_IN (fndecl))
9678 /* If last argument is __builtin_va_arg_pack (), arguments to this
9679 function are not finalized yet. Defer folding until they are. */
9680 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9682 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9683 if (fndecl2
9684 && TREE_CODE (fndecl2) == FUNCTION_DECL
9685 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9686 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9687 return NULL_TREE;
9689 if (avoid_folding_inline_builtin (fndecl))
9690 return NULL_TREE;
9691 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9692 return targetm.fold_builtin (fndecl, n, argarray, false);
9693 else
9694 return fold_builtin_n (loc, fndecl, argarray, n, false);
9697 return NULL_TREE;
9700 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9701 along with N new arguments specified as the "..." parameters. SKIP
9702 is the number of arguments in EXP to be omitted. This function is used
9703 to do varargs-to-varargs transformations. */
9705 static tree
9706 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9708 va_list ap;
9709 tree t;
9711 va_start (ap, n);
9712 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9713 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9714 va_end (ap);
9716 return t;
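/* A hypothetical usage sketch: turn sprintf (dest, "%s", str) into
   strcpy (dest, str) by skipping the first two arguments (dest and the
   format string) and prepending dest again, letting str flow through
   from the tail of the original argument list:
     rewrite_call_expr (loc, exp, 2, strcpy_fn, 1, dest);  */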
9719 /* Validate a single argument ARG against a tree code CODE representing
9720 a type. Return true when argument is valid. */
9722 static bool
9723 validate_arg (const_tree arg, enum tree_code code)
9725 if (!arg)
9726 return false;
9727 else if (code == POINTER_TYPE)
9728 return POINTER_TYPE_P (TREE_TYPE (arg));
9729 else if (code == INTEGER_TYPE)
9730 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9731 return code == TREE_CODE (TREE_TYPE (arg));
9734 /* This function validates the types of a function call argument list
9735 against a specified list of tree_codes. If the last specifier is a 0,
9736 that represents an ellipsis, otherwise the last specifier must be a
9737 VOID_TYPE.
9739 This is the GIMPLE version of validate_arglist. Eventually we want to
9740 completely convert builtins.c to work from GIMPLEs and the tree based
9741 validate_arglist will then be removed. */
9743 bool
9744 validate_gimple_arglist (const gcall *call, ...)
9746 enum tree_code code;
9747 bool res = 0;
9748 va_list ap;
9749 const_tree arg;
9750 size_t i;
9752 va_start (ap, call);
9753 i = 0;
9755 do
9757 code = (enum tree_code) va_arg (ap, int);
9758 switch (code)
9760 case 0:
9761 /* This signifies an ellipses, any further arguments are all ok. */
9762 res = true;
9763 goto end;
9764 case VOID_TYPE:
9765 /* This signifies an endlink, if no arguments remain, return
9766 true, otherwise return false. */
9767 res = (i == gimple_call_num_args (call));
9768 goto end;
9769 default:
9770 /* If no parameters remain or the parameter's code does not
9771 match the specified code, return false. Otherwise continue
9772 checking any remaining arguments. */
9773 arg = gimple_call_arg (call, i++);
9774 if (!validate_arg (arg, code))
9775 goto end;
9776 break;
9779 while (1);
9781 /* We need gotos here since we call va_end just once, at a single
9782 exit point. */
9783 end: ;
9784 va_end (ap);
9786 return res;
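/* Usage sketches (illustration only): check a call shaped like
   frexp (double, int *), then one with a fixed prefix plus varargs:
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE);
     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, 0);  */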
9789 /* Default target-specific builtin expander that does nothing. */
9791 rtx
9792 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9793 rtx target ATTRIBUTE_UNUSED,
9794 rtx subtarget ATTRIBUTE_UNUSED,
9795 machine_mode mode ATTRIBUTE_UNUSED,
9796 int ignore ATTRIBUTE_UNUSED)
9798 return NULL_RTX;
9801 /* Returns true if EXP represents data that would potentially reside
9802 in a readonly section. */
9804 bool
9805 readonly_data_expr (tree exp)
9807 STRIP_NOPS (exp);
9809 if (TREE_CODE (exp) != ADDR_EXPR)
9810 return false;
9812 exp = get_base_address (TREE_OPERAND (exp, 0));
9813 if (!exp)
9814 return false;
9816 /* Make sure we call decl_readonly_section only for trees it
9817 can handle (since it returns true for everything it doesn't
9818 understand). */
9819 if (TREE_CODE (exp) == STRING_CST
9820 || TREE_CODE (exp) == CONSTRUCTOR
9821 || (VAR_P (exp) && TREE_STATIC (exp)))
9822 return decl_readonly_section (exp, 0);
9823 else
9824 return false;
9827 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9828 to the call, and TYPE is its return type.
9830 Return NULL_TREE if no simplification was possible, otherwise return the
9831 simplified form of the call as a tree.
9833 The simplified form may be a constant or other expression which
9834 computes the same value, but in a more efficient manner (including
9835 calls to other builtin functions).
9837 The call may contain arguments which need to be evaluated, but
9838 which are not useful to determine the result of the call. In
9839 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9840 COMPOUND_EXPR will be an argument which must be evaluated.
9841 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9842 COMPOUND_EXPR in the chain will contain the tree for the simplified
9843 form of the builtin function call. */
9845 static tree
9846 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9848 if (!validate_arg (s1, POINTER_TYPE)
9849 || !validate_arg (s2, POINTER_TYPE))
9850 return NULL_TREE;
9851 else
9853 tree fn;
9854 const char *p1, *p2;
9856 p2 = c_getstr (s2);
9857 if (p2 == NULL)
9858 return NULL_TREE;
9860 p1 = c_getstr (s1);
9861 if (p1 != NULL)
9863 const char *r = strpbrk (p1, p2);
9864 tree tem;
9866 if (r == NULL)
9867 return build_int_cst (TREE_TYPE (s1), 0);
9869 /* Return an offset into the constant string argument. */
9870 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9871 return fold_convert_loc (loc, type, tem);
9874 if (p2[0] == '\0')
9875 /* strpbrk(x, "") == NULL.
9876 Evaluate and ignore s1 in case it had side-effects. */
9877 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9879 if (p2[1] != '\0')
9880 return NULL_TREE; /* Really call strpbrk. */
9882 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9883 if (!fn)
9884 return NULL_TREE;
9886 /* New argument list transforming strpbrk(s1, s2) to
9887 strchr(s1, s2[0]). */
9888 return build_call_expr_loc (loc, fn, 2, s1,
9889 build_int_cst (integer_type_node, p2[0]));
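/* A usage sketch of the single-character case above (illustration
   only):  */
static char *
first_slash (char *s)
{
  return __builtin_strpbrk (s, "/");  /* becomes strchr (s, '/') */
}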
9893 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9894 to the call.
9896 Return NULL_TREE if no simplification was possible, otherwise return the
9897 simplified form of the call as a tree.
9899 The simplified form may be a constant or other expression which
9900 computes the same value, but in a more efficient manner (including
9901 calls to other builtin functions).
9903 The call may contain arguments which need to be evaluated, but
9904 which are not useful to determine the result of the call. In
9905 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9906 COMPOUND_EXPR will be an argument which must be evaluated.
9907 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9908 COMPOUND_EXPR in the chain will contain the tree for the simplified
9909 form of the builtin function call. */
9911 static tree
9912 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9914 if (!validate_arg (s1, POINTER_TYPE)
9915 || !validate_arg (s2, POINTER_TYPE))
9916 return NULL_TREE;
9917 else
9919 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9921 /* If either argument is "", the result is 0. */
9922 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9923 /* Evaluate and ignore both arguments in case either one has
9924 side-effects. */
9925 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9926 s1, s2);
9927 return NULL_TREE;
9931 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9932 to the call.
9934 Return NULL_TREE if no simplification was possible, otherwise return the
9935 simplified form of the call as a tree.
9937 The simplified form may be a constant or other expression which
9938 computes the same value, but in a more efficient manner (including
9939 calls to other builtin functions).
9941 The call may contain arguments which need to be evaluated, but
9942 which are not useful to determine the result of the call. In
9943 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9944 COMPOUND_EXPR will be an argument which must be evaluated.
9945 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9946 COMPOUND_EXPR in the chain will contain the tree for the simplified
9947 form of the builtin function call. */
9949 static tree
9950 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9952 if (!validate_arg (s1, POINTER_TYPE)
9953 || !validate_arg (s2, POINTER_TYPE))
9954 return NULL_TREE;
9955 else
9957 /* If the first argument is "", the result is 0. */
9958 const char *p1 = c_getstr (s1);
9959 if (p1 && *p1 == '\0')
9961 /* Evaluate and ignore argument s2 in case it has
9962 side-effects. */
9963 return omit_one_operand_loc (loc, size_type_node,
9964 size_zero_node, s2);
9967 /* If the second argument is "", return __builtin_strlen(s1). */
9968 const char *p2 = c_getstr (s2);
9969 if (p2 && *p2 == '\0')
9971 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9973 /* If the replacement _DECL isn't initialized, don't do the
9974 transformation. */
9975 if (!fn)
9976 return NULL_TREE;
9978 return build_call_expr_loc (loc, fn, 1, s1);
9980 return NULL_TREE;
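/* Usage sketches of the folds above, assuming constant strings
   (illustration only):  */
static __SIZE_TYPE__
spn_examples (const char *s)
{
  return __builtin_strcspn (s, "")   /* folds to strlen (s) */
       + __builtin_strcspn ("", s)   /* folds to 0; s still evaluated */
       + __builtin_strspn (s, "");   /* folds to 0 */
}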
9984 /* Fold the next_arg or va_start call EXP. Returns true if an error
9985 was produced, false otherwise. This is done so that we don't output
9986 the error or warning twice or three times. */
9988 bool
9989 fold_builtin_next_arg (tree exp, bool va_start_p)
9991 tree fntype = TREE_TYPE (current_function_decl);
9992 int nargs = call_expr_nargs (exp);
9993 tree arg;
9994 /* There is a good chance the current input_location points inside the
9995 definition of the va_start macro (perhaps on the token for
9996 builtin) in a system header, so warnings will not be emitted.
9997 Use the location in real source code. */
9998 source_location current_location =
9999 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10000 NULL);
10002 if (!stdarg_p (fntype))
10004 error ("%<va_start%> used in function with fixed args");
10005 return true;
10008 if (va_start_p)
10010 if (va_start_p && (nargs != 2))
10012 error ("wrong number of arguments to function %<va_start%>");
10013 return true;
10015 arg = CALL_EXPR_ARG (exp, 1);
10017 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10018 when we checked the arguments and if needed issued a warning. */
10019 else
10021 if (nargs == 0)
10023 /* Evidently an out of date version of <stdarg.h>; can't validate
10024 va_start's second argument, but can still work as intended. */
10025 warning_at (current_location,
10026 OPT_Wvarargs,
10027 "%<__builtin_next_arg%> called without an argument");
10028 return true;
10030 else if (nargs > 1)
10032 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10033 return true;
10035 arg = CALL_EXPR_ARG (exp, 0);
10038 if (TREE_CODE (arg) == SSA_NAME)
10039 arg = SSA_NAME_VAR (arg);
10041 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10042 or __builtin_next_arg (0) the first time we see it, after checking
10043 the arguments and if needed issuing a warning. */
10044 if (!integer_zerop (arg))
10046 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10048 /* Strip off all nops for the sake of the comparison. This
10049 is not quite the same as STRIP_NOPS. It does more.
10050 We must also strip off INDIRECT_EXPR for C++ reference
10051 parameters. */
10052 while (CONVERT_EXPR_P (arg)
10053 || TREE_CODE (arg) == INDIRECT_REF)
10054 arg = TREE_OPERAND (arg, 0);
10055 if (arg != last_parm)
10057 /* FIXME: Sometimes with the tree optimizers we can get passed
10058 something other than the last argument even though the user
10059 used the last argument. We just warn and set the arg to be the
10060 last argument, so we will get wrong code because of
10061 it. */
10062 warning_at (current_location,
10063 OPT_Wvarargs,
10064 "second parameter of %<va_start%> not last named argument");
10067 /* Undefined by C99 7.15.1.4p4 (va_start):
10068 "If the parameter parmN is declared with the register storage
10069 class, with a function or array type, or with a type that is
10070 not compatible with the type that results after application of
10071 the default argument promotions, the behavior is undefined."
10073 else if (DECL_REGISTER (arg))
10075 warning_at (current_location,
10076 OPT_Wvarargs,
10077 "undefined behavior when second parameter of "
10078 "%<va_start%> is declared with %<register%> storage");
10081 /* We want to verify the second parameter just once before the tree
10082 optimizers are run and then avoid keeping it in the tree,
10083 as otherwise we could warn even for correct code like:
10084 void foo (int i, ...)
10085 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10086 if (va_start_p)
10087 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10088 else
10089 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10091 return false;
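/* A sketch of the shape checked above (illustration only, assuming
   <stdarg.h>): the second argument of va_start must name the last
   declared parameter.  */
#include <stdarg.h>
static int
sum_until_zero (int last, ...)
{
  va_list ap;
  int total = last, v;
  va_start (ap, last);
  while ((v = va_arg (ap, int)) != 0)
    total += v;
  va_end (ap);
  return total;
}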
10095 /* Expand a call EXP to __builtin_object_size. */
10097 static rtx
10098 expand_builtin_object_size (tree exp)
10100 tree ost;
10101 int object_size_type;
10102 tree fndecl = get_callee_fndecl (exp);
10104 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10106 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10107 exp, fndecl);
10108 expand_builtin_trap ();
10109 return const0_rtx;
10112 ost = CALL_EXPR_ARG (exp, 1);
10113 STRIP_NOPS (ost);
10115 if (TREE_CODE (ost) != INTEGER_CST
10116 || tree_int_cst_sgn (ost) < 0
10117 || compare_tree_int (ost, 3) > 0)
10119 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10120 exp, fndecl);
10121 expand_builtin_trap ();
10122 return const0_rtx;
10125 object_size_type = tree_to_shwi (ost);
10127 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10130 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10131 FCODE is the BUILT_IN_* to use.
10132 Return NULL_RTX if we failed; the caller should emit a normal call,
10133 otherwise try to get the result in TARGET, if convenient (and in
10134 mode MODE if that's convenient). */
10136 static rtx
10137 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10138 enum built_in_function fcode)
10140 if (!validate_arglist (exp,
10141 POINTER_TYPE,
10142 fcode == BUILT_IN_MEMSET_CHK
10143 ? INTEGER_TYPE : POINTER_TYPE,
10144 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10145 return NULL_RTX;
10147 tree dest = CALL_EXPR_ARG (exp, 0);
10148 tree src = CALL_EXPR_ARG (exp, 1);
10149 tree len = CALL_EXPR_ARG (exp, 2);
10150 tree size = CALL_EXPR_ARG (exp, 3);
10152 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10153 /*str=*/NULL_TREE, size);
10155 if (!tree_fits_uhwi_p (size))
10156 return NULL_RTX;
10158 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10160 /* Avoid transforming the checking call to an ordinary one when
10161 an overflow has been detected or when the call couldn't be
10162 validated because the size is not constant. */
10163 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10164 return NULL_RTX;
10166 tree fn = NULL_TREE;
10167 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10168 mem{cpy,pcpy,move,set} is available. */
10169 switch (fcode)
10171 case BUILT_IN_MEMCPY_CHK:
10172 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10173 break;
10174 case BUILT_IN_MEMPCPY_CHK:
10175 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10176 break;
10177 case BUILT_IN_MEMMOVE_CHK:
10178 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10179 break;
10180 case BUILT_IN_MEMSET_CHK:
10181 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10182 break;
10183 default:
10184 break;
10187 if (! fn)
10188 return NULL_RTX;
10190 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10191 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10192 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10193 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10195 else if (fcode == BUILT_IN_MEMSET_CHK)
10196 return NULL_RTX;
10197 else
10199 unsigned int dest_align = get_pointer_alignment (dest);
10201 /* If DEST is not a pointer type, call the normal function. */
10202 if (dest_align == 0)
10203 return NULL_RTX;
10205 /* If SRC and DEST are the same (and not volatile), do nothing. */
10206 if (operand_equal_p (src, dest, 0))
10208 tree expr;
10210 if (fcode != BUILT_IN_MEMPCPY_CHK)
10212 /* Evaluate and ignore LEN in case it has side-effects. */
10213 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10214 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10217 expr = fold_build_pointer_plus (dest, len);
10218 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10221 /* __memmove_chk special case. */
10222 if (fcode == BUILT_IN_MEMMOVE_CHK)
10224 unsigned int src_align = get_pointer_alignment (src);
10226 if (src_align == 0)
10227 return NULL_RTX;
10229 /* If src is categorized for a readonly section we can use
10230 normal __memcpy_chk. */
10231 if (readonly_data_expr (src))
10233 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10234 if (!fn)
10235 return NULL_RTX;
10236 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10237 dest, src, len, size);
10238 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10239 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10240 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10243 return NULL_RTX;
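/* A usage sketch of the degradation above (illustration only): once
   the length provably fits the destination, the checking call expands
   like the plain one.  */
static char
copy8 (const char *s)
{
  char d[16];
  __builtin___memcpy_chk (d, s, 8, __builtin_object_size (d, 0));
  /* 8 <= 16, so this expands as memcpy (d, s, 8).  */
  return d[0];
}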
10247 /* Emit warning if a buffer overflow is detected at compile time. */
10249 static void
10250 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10252 /* The source string. */
10253 tree srcstr = NULL_TREE;
10254 /* The size of the destination object. */
10255 tree objsize = NULL_TREE;
10256 /* The string that is being concatenated with (as in __strcat_chk)
10257 or null if it isn't. */
10258 tree catstr = NULL_TREE;
10259 /* The maximum length of the source sequence in a bounded operation
10260 (such as __strncat_chk) or null if the operation isn't bounded
10261 (such as __strcat_chk). */
10262 tree maxread = NULL_TREE;
10263 /* The exact size of the access (such as in __strncpy_chk). */
10264 tree size = NULL_TREE;
10266 switch (fcode)
10268 case BUILT_IN_STRCPY_CHK:
10269 case BUILT_IN_STPCPY_CHK:
10270 srcstr = CALL_EXPR_ARG (exp, 1);
10271 objsize = CALL_EXPR_ARG (exp, 2);
10272 break;
10274 case BUILT_IN_STRCAT_CHK:
10275 /* For __strcat_chk the warning will be emitted only if overflowing
10276 by at least strlen (dest) + 1 bytes. */
10277 catstr = CALL_EXPR_ARG (exp, 0);
10278 srcstr = CALL_EXPR_ARG (exp, 1);
10279 objsize = CALL_EXPR_ARG (exp, 2);
10280 break;
10282 case BUILT_IN_STRNCAT_CHK:
10283 catstr = CALL_EXPR_ARG (exp, 0);
10284 srcstr = CALL_EXPR_ARG (exp, 1);
10285 maxread = CALL_EXPR_ARG (exp, 2);
10286 objsize = CALL_EXPR_ARG (exp, 3);
10287 break;
10289 case BUILT_IN_STRNCPY_CHK:
10290 case BUILT_IN_STPNCPY_CHK:
10291 srcstr = CALL_EXPR_ARG (exp, 1);
10292 size = CALL_EXPR_ARG (exp, 2);
10293 objsize = CALL_EXPR_ARG (exp, 3);
10294 break;
10296 case BUILT_IN_SNPRINTF_CHK:
10297 case BUILT_IN_VSNPRINTF_CHK:
10298 maxread = CALL_EXPR_ARG (exp, 1);
10299 objsize = CALL_EXPR_ARG (exp, 3);
10300 break;
10301 default:
10302 gcc_unreachable ();
10305 if (catstr && maxread)
10307 /* Check __strncat_chk. There is no way to determine the length
10308 of the string to which the source string is being appended so
10309 just warn when the length of the source string is not known. */
10310 check_strncat_sizes (exp, objsize);
10311 return;
10314 /* The destination argument is the first one for all built-ins above. */
10315 tree dst = CALL_EXPR_ARG (exp, 0);
10317 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10320 /* Emit warning if a buffer overflow is detected at compile time
10321 in __sprintf_chk/__vsprintf_chk calls. */
10323 static void
10324 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10326 tree size, len, fmt;
10327 const char *fmt_str;
10328 int nargs = call_expr_nargs (exp);
10330 /* Verify the required arguments in the original call. */
10332 if (nargs < 4)
10333 return;
10334 size = CALL_EXPR_ARG (exp, 2);
10335 fmt = CALL_EXPR_ARG (exp, 3);
10337 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10338 return;
10340 /* Check whether the format is a literal string constant. */
10341 fmt_str = c_getstr (fmt);
10342 if (fmt_str == NULL)
10343 return;
10345 if (!init_target_chars ())
10346 return;
10348 /* If the format doesn't contain % args or %%, we know its size. */
10349 if (strchr (fmt_str, target_percent) == 0)
10350 len = build_int_cstu (size_type_node, strlen (fmt_str));
10351 /* If the format is "%s" and first ... argument is a string literal,
10352 we know it too. */
10353 else if (fcode == BUILT_IN_SPRINTF_CHK
10354 && strcmp (fmt_str, target_percent_s) == 0)
10356 tree arg;
10358 if (nargs < 5)
10359 return;
10360 arg = CALL_EXPR_ARG (exp, 4);
10361 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10362 return;
10364 len = c_strlen (arg, 1);
10365 if (!len || ! tree_fits_uhwi_p (len))
10366 return;
10368 else
10369 return;
10371 /* Add one for the terminating nul. */
10372 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10374 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10375 /*maxread=*/NULL_TREE, len, size);
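/* A sketch of a call the check above diagnoses at compile time
   (illustration only):  */
static void
chk_warn_example (void)
{
  char buf[4];
  /* strlen ("too long") + 1 == 9 > 4: overflow warned here.  */
  __builtin___sprintf_chk (buf, 0, sizeof buf, "%s", "too long");
}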
10378 /* Emit warning if a free is called with address of a variable. */
10380 static void
10381 maybe_emit_free_warning (tree exp)
10383 tree arg = CALL_EXPR_ARG (exp, 0);
10385 STRIP_NOPS (arg);
10386 if (TREE_CODE (arg) != ADDR_EXPR)
10387 return;
10389 arg = get_base_address (TREE_OPERAND (arg, 0));
10390 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10391 return;
10393 if (SSA_VAR_P (arg))
10394 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10395 "%Kattempt to free a non-heap object %qD", exp, arg);
10396 else
10397 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10398 "%Kattempt to free a non-heap object", exp);
10401 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10402 if possible. */
10404 static tree
10405 fold_builtin_object_size (tree ptr, tree ost)
10407 unsigned HOST_WIDE_INT bytes;
10408 int object_size_type;
10410 if (!validate_arg (ptr, POINTER_TYPE)
10411 || !validate_arg (ost, INTEGER_TYPE))
10412 return NULL_TREE;
10414 STRIP_NOPS (ost);
10416 if (TREE_CODE (ost) != INTEGER_CST
10417 || tree_int_cst_sgn (ost) < 0
10418 || compare_tree_int (ost, 3) > 0)
10419 return NULL_TREE;
10421 object_size_type = tree_to_shwi (ost);
10423 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10424 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10425 and (size_t) 0 for types 2 and 3. */
10426 if (TREE_SIDE_EFFECTS (ptr))
10427 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10429 if (TREE_CODE (ptr) == ADDR_EXPR)
10431 compute_builtin_object_size (ptr, object_size_type, &bytes);
10432 if (wi::fits_to_tree_p (bytes, size_type_node))
10433 return build_int_cstu (size_type_node, bytes);
10435 else if (TREE_CODE (ptr) == SSA_NAME)
10437 /* If object size is not known yet, delay folding until
10438 later. Maybe subsequent passes will help determine
10439 it. */
10440 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10441 && wi::fits_to_tree_p (bytes, size_type_node))
10442 return build_int_cstu (size_type_node, bytes);
10445 return NULL_TREE;
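/* A usage sketch of the conventions above (illustration only):  */
static void
objsize_examples (char *p)
{
  char buf[64];
  __SIZE_TYPE__ a = __builtin_object_size (buf, 0);  /* 64 */
  __SIZE_TYPE__ b = __builtin_object_size (p, 0);    /* (size_t) -1 if unknown */
  __SIZE_TYPE__ c = __builtin_object_size (p, 2);    /* 0 if unknown */
  (void) a; (void) b; (void) c;
}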
10448 /* Builtins with folding operations that operate on "..." arguments
10449 need special handling; we need to store the arguments in a convenient
10450 data structure before attempting any folding. Fortunately there are
10451 only a few builtins that fall into this category. FNDECL is the
10452 function, EXP is the CALL_EXPR for the call. */
10454 static tree
10455 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10457 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10458 tree ret = NULL_TREE;
10460 switch (fcode)
10462 case BUILT_IN_FPCLASSIFY:
10463 ret = fold_builtin_fpclassify (loc, args, nargs);
10464 break;
10466 default:
10467 break;
10469 if (ret)
10471 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10472 SET_EXPR_LOCATION (ret, loc);
10473 TREE_NO_WARNING (ret) = 1;
10474 return ret;
10476 return NULL_TREE;
10479 /* Initialize format string characters in the target charset. */
10481 bool
10482 init_target_chars (void)
10484 static bool init;
10485 if (!init)
10487 target_newline = lang_hooks.to_target_charset ('\n');
10488 target_percent = lang_hooks.to_target_charset ('%');
10489 target_c = lang_hooks.to_target_charset ('c');
10490 target_s = lang_hooks.to_target_charset ('s');
10491 if (target_newline == 0 || target_percent == 0 || target_c == 0
10492 || target_s == 0)
10493 return false;
10495 target_percent_c[0] = target_percent;
10496 target_percent_c[1] = target_c;
10497 target_percent_c[2] = '\0';
10499 target_percent_s[0] = target_percent;
10500 target_percent_s[1] = target_s;
10501 target_percent_s[2] = '\0';
10503 target_percent_s_newline[0] = target_percent;
10504 target_percent_s_newline[1] = target_s;
10505 target_percent_s_newline[2] = target_newline;
10506 target_percent_s_newline[3] = '\0';
10508 init = true;
10510 return true;
10513 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10514 and no overflow/underflow occurred. INEXACT is true if M was not
10515 exactly calculated. TYPE is the tree type for the result. This
10516 function assumes that you cleared the MPFR flags before calculating
10517 M, so that it can check whether anything set a flag in the
10518 meantime. Return NULL_TREE if any checks fail. */
10520 static tree
10521 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10523 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10524 overflow/underflow occurred. If -frounding-math, proceed iff the
10525 result of calling FUNC was exact. */
10526 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10527 && (!flag_rounding_math || !inexact))
10529 REAL_VALUE_TYPE rr;
10531 real_from_mpfr (&rr, m, type, GMP_RNDN);
10532 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10533 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10534 but the mpfr_t is not, then we underflowed in the
10535 conversion. */
10536 if (real_isfinite (&rr)
10537 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10539 REAL_VALUE_TYPE rmode;
10541 real_convert (&rmode, TYPE_MODE (type), &rr);
10542 /* Proceed iff the specified mode can hold the value. */
10543 if (real_identical (&rmode, &rr))
10544 return build_real (type, rmode);
10547 return NULL_TREE;
10550 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10551 number and no overflow/underflow occurred. INEXACT is true if M
10552 was not exactly calculated. TYPE is the tree type for the result.
10553 This function assumes that you cleared the MPFR flags before
10554 calculating M, so that it can check whether anything set a flag in
10555 the meantime. Return NULL_TREE if any checks fail; if
10556 FORCE_CONVERT is true, bypass the checks. */
10558 static tree
10559 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10561 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10562 overflow/underflow occurred. If -frounding-math, proceed iff the
10563 result of calling FUNC was exact. */
10564 if (force_convert
10565 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10566 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10567 && (!flag_rounding_math || !inexact)))
10569 REAL_VALUE_TYPE re, im;
10571 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10572 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10573 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10574 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10575 but the mpfr_t is not, then we underflowed in the
10576 conversion. */
10577 if (force_convert
10578 || (real_isfinite (&re) && real_isfinite (&im)
10579 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10580 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10582 REAL_VALUE_TYPE re_mode, im_mode;
10584 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10585 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10586 /* Proceed iff the specified mode can hold the value. */
10587 if (force_convert
10588 || (real_identical (&re_mode, &re)
10589 && real_identical (&im_mode, &im)))
10590 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10591 build_real (TREE_TYPE (type), im_mode));
10594 return NULL_TREE;
10597 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10598 the value pointed to by ARG_QUO and return the result. The type is taken
10599 from the type of ARG0 and is used for setting the precision of the
10600 calculation and results. */
10602 static tree
10603 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10605 tree const type = TREE_TYPE (arg0);
10606 tree result = NULL_TREE;
10608 STRIP_NOPS (arg0);
10609 STRIP_NOPS (arg1);
10611 /* To proceed, MPFR must exactly represent the target floating point
10612 format, which only happens when the target base equals two. */
10613 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10614 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10615 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10617 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10618 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10620 if (real_isfinite (ra0) && real_isfinite (ra1))
10622 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10623 const int prec = fmt->p;
10624 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10625 tree result_rem;
10626 long integer_quo;
10627 mpfr_t m0, m1;
10629 mpfr_inits2 (prec, m0, m1, NULL);
10630 mpfr_from_real (m0, ra0, GMP_RNDN);
10631 mpfr_from_real (m1, ra1, GMP_RNDN);
10632 mpfr_clear_flags ();
10633 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10634 /* Remquo is independent of the rounding mode, so pass
10635 inexact=0 to do_mpfr_ckconv(). */
10636 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10637 mpfr_clears (m0, m1, NULL);
10638 if (result_rem)
10640 /* MPFR calculates quo in the host's long so it may
10641 return more bits in quo than the target int can hold
10642 if sizeof(host long) > sizeof(target int). This can
10643 happen even for native compilers in LP64 mode. In
10644 these cases, modulo the quo value with the largest
10645 number that the target int can hold while leaving one
10646 bit for the sign. */
10647 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10648 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10650 /* Dereference the quo pointer argument. */
10651 arg_quo = build_fold_indirect_ref (arg_quo);
10652 /* Proceed iff a valid pointer type was passed in. */
10653 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10655 /* Set the value. */
10656 tree result_quo
10657 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10658 build_int_cst (TREE_TYPE (arg_quo),
10659 integer_quo));
10660 TREE_SIDE_EFFECTS (result_quo) = 1;
10661 /* Combine the quo assignment with the rem. */
10662 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10663 result_quo, result_rem));
10668 return result;
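/* A constant-folding sketch for the helper above (illustration only):
   7 = 2*3 + 1, so the remainder folds to 1.0 and *q is set to 2.  */
static double
remquo_example (int *q)
{
  return __builtin_remquo (7.0, 3.0, q);
}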

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
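
/* A worked instance of the above (assuming the default
   -fno-rounding-math, under which inexact results still fold):
   __builtin_lgamma_r (-0.5, &sg) computes gamma(-0.5) = -2*sqrt(pi)
   ~= -3.5449, so the call folds to log(|gamma(-0.5)|) ~= 1.2655 with
   *sg set to -1; for an argument of 0.5 the result is
   log(sqrt(pi)) ~= 0.5724 with *sg = 1.  */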

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
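
/* A typical caller (a sketch; treat the exact call site as an
   assumption) passes an mpc entry point such as mpc_pow here, with
   the fourth argument being DO_NONFINITE:

     tree t = do_mpc_arg2 (arg0, arg1, type,
			   flag_unsafe_math_optimizations,
			   mpc_pow);

   With constant operands, a call like cpow (1.0 + 2.0i, 2.0) then
   folds to the complex constant -3.0 + 4.0i, since (1+2i)^2 = -3+4i
   exactly.  */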

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from the original call to
		 the expansion of the builtin.  Otherwise things like
		 maybe_emit_chk_warning, which operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
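
/* For instance, folding a gimple call such as

     tmp = __builtin_popcount (255);

   goes through fold_builtin_n above, which returns the integer
   constant 8 (255 has eight set bits); the caller then replaces the
   call statement with that constant, after the call's location has
   been copied onto the folded expression.  */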

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
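
/* This is roughly what makes a (hypothetical) renaming declaration
   such as

     extern int ffs (int) __asm__ ("hw_ffs");

   take effect for implicit uses too.  ffs is special-cased above
   because, when int is narrower than a word, calls may be emitted
   through the ffs optab as libcalls rather than as direct calls to
   the builtin decl, and those libcalls must also resolve to the
   renamed symbol.  */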

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
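
/* E.g., __builtin_expect (x, 1) simply evaluates to x, and
   __builtin_frame_address (0) is a single register read, so both
   qualify as "simple" here.  */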

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
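
/* E.g., __builtin_popcount typically becomes a single population-count
   instruction or a short libcall, and __builtin_isnan an unordered
   self-comparison, so they are inexpensive without being "simple" in
   the sense above.  */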

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
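
/* A sketch of the intended use (as in the string-builtin folders):
   when folding a call like strchr (s, c) with constant arguments, the
   searched-for character must be examined on the host, so a folder
   does roughly

     char c;
     if (target_char_cst_p (arg1, &c))
       ...scan the known string contents for c on the host...

   and gives up when the target and host char widths differ.  */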

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
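
/* On a typical LP64 target, ptrdiff_t is 64 bits wide, so this yields
   2^63 - 1.  The access and object-size warnings in this file use it
   as the upper bound on the size any single object can have (an
   assumption about usage; the exact consumers are elsewhere in this
   file).  */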