/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
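
/* For example (illustrative only), is_builtin_name ("__builtin_memcpy")
   and is_builtin_name ("__atomic_load") return true, while a plain
   is_builtin_name ("memcpy") returns false.  */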

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
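
/* Illustrative example: if get_object_alignment_1 reports align == 256
   and bitpos == 64, the address is known to sit 64 bits past a 256-bit
   boundary, so the best guaranteed alignment of the object itself is
   least_bit_hwi (64) == 64 bits, i.e. 8 bytes.  */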

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
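
/* Illustrative example: for a POINTER_PLUS_EXPR adding the constant 4
   to a pointer known to be 8-byte aligned, get_pointer_alignment_1
   yields align == 64 and bitpos == 32 (both in bits), so the function
   above returns least_bit_hwi (32) == 32 bits, i.e. a guaranteed
   4-byte alignment.  */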

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
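
/* For instance, with the plain char buffer "ab\0cd",
   string_length (buf, 1, 5) stops at the embedded NUL and returns 2,
   while string_length (buf + 3, 1, 2) returns 2 because no NUL occurs
   within the two elements scanned.  */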

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  HOST_WIDE_INT maxelts = strelts;
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      {
	maxelts = tree_to_uhwi (size);
	maxelts = maxelts / eltsize - 1;
      }

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);
      if (len < strelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len * eltsize));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts * eltsize), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
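
/* For example, for SRC denoting the literal "hello", c_strlen returns
   ssize_int (5); with a known byte offset of 2 into the same literal it
   searches from the first 'l' onwards and returns ssize_int (3).  An
   embedded NUL combined with an unknown offset, as in "foo\0bar", makes
   it give up and return NULL_TREE.  */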

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
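
/* Illustrative example: on a little-endian target (neither
   BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN), c_readstr ("abcd", SImode)
   places 'a' in the least significant byte and yields the constant
   0x64636261.  */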

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
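
/* For example, for CST equal to the integer constant 97 ('a') this
   returns 0 and stores 'a' in *P.  It returns 1 when the value does not
   survive the round trip, e.g. a 16-bit target char holding 0x1ff on a
   host with 8-bit chars.  */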

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the CALL_EXPR argument iterator ITER has arguments
   remaining.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
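
/* For example, expand_builtin_nonlocal_goto below checks its two pointer
   arguments with
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   where the trailing VOID_TYPE means exactly two arguments are expected;
   a trailing 0 would instead have allowed extra trailing arguments.  */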

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
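
/* For example, __builtin_prefetch (p) is equivalent to
   __builtin_prefetch (p, 0, 3): a read prefetch with maximal temporal
   locality, matching the defaults supplied above.  */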

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1761 /* Perform an untyped return. */
1763 static void
1764 expand_builtin_return (rtx result)
1766 int size, align, regno;
1767 fixed_size_mode mode;
1768 rtx reg;
1769 rtx_insn *call_fusage = 0;
1771 result = convert_memory_address (Pmode, result);
1773 apply_result_size ();
1774 result = gen_rtx_MEM (BLKmode, result);
1776 if (targetm.have_untyped_return ())
1778 rtx vector = result_vector (0, result);
1779 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1780 emit_barrier ();
1781 return;
1784 /* Restore the return value and note that each value is used. */
1785 size = 0;
1786 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1787 if ((mode = apply_result_mode[regno]) != VOIDmode)
1789 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1790 if (size % align != 0)
1791 size = CEIL (size, align) * align;
1792 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1793 emit_move_insn (reg, adjust_address (result, mode, size));
1795 push_to_sequence (call_fusage);
1796 emit_use (reg);
1797 call_fusage = get_insns ();
1798 end_sequence ();
1799 size += GET_MODE_SIZE (mode);
1802 /* Put the USE insns before the return. */
1803 emit_insn (call_fusage);
1805 /* Return whatever values were restored by jumping directly to the end
1806 of the function. */
1807 expand_naked_return ();
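/* Illustrative example (not part of this file): the two expanders above
   implement GCC's untyped call extension.  A forwarding wrapper might
   look like the following, where `work' is a hypothetical callee and
   64 is an assumed upper bound on the size of the argument block:

     double work (int a, double b) { return a + b; }

     double forward (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) work, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply_args saves the incoming registers into the block
   returned as ARGS, expand_builtin_apply replays them for the call, and
   expand_builtin_return restores the saved return registers and jumps
   straight to the function epilogue.  */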
1810 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1812 static enum type_class
1813 type_to_class (tree type)
1815 switch (TREE_CODE (type))
1817 case VOID_TYPE: return void_type_class;
1818 case INTEGER_TYPE: return integer_type_class;
1819 case ENUMERAL_TYPE: return enumeral_type_class;
1820 case BOOLEAN_TYPE: return boolean_type_class;
1821 case POINTER_TYPE: return pointer_type_class;
1822 case REFERENCE_TYPE: return reference_type_class;
1823 case OFFSET_TYPE: return offset_type_class;
1824 case REAL_TYPE: return real_type_class;
1825 case COMPLEX_TYPE: return complex_type_class;
1826 case FUNCTION_TYPE: return function_type_class;
1827 case METHOD_TYPE: return method_type_class;
1828 case RECORD_TYPE: return record_type_class;
1829 case UNION_TYPE:
1830 case QUAL_UNION_TYPE: return union_type_class;
1831 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1832 ? string_type_class : array_type_class);
1833 case LANG_TYPE: return lang_type_class;
1834 default: return no_type_class;
1838 /* Expand a call EXP to __builtin_classify_type. */
1840 static rtx
1841 expand_builtin_classify_type (tree exp)
1843 if (call_expr_nargs (exp))
1844 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1845 return GEN_INT (no_type_class);
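/* Illustrative example: type_to_class backs __builtin_classify_type,
   which folds to one of the enumerators returned above, e.g.:

     int i; double d; char *p; struct s { int x; } r;

     __builtin_classify_type (i);   // integer_type_class
     __builtin_classify_type (d);   // real_type_class
     __builtin_classify_type (p);   // pointer_type_class
     __builtin_classify_type (r);   // record_type_class

   A call with no argument expands to no_type_class.  */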
1848 /* This helper macro, meant to be used in mathfn_built_in below, determines
1849 which among a set of builtin math functions is appropriate for a given type
1850 mode. The `F' (float) and `L' (long double) are automatically generated
1851 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1852 types, there are additional types that are considered with 'F32', 'F64',
1853 'F128', etc. suffixes. */
1854 #define CASE_MATHFN(MATHFN) \
1855 CASE_CFN_##MATHFN: \
1856 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1857 fcodel = BUILT_IN_##MATHFN##L ; break;
1858 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1859 types. */
1860 #define CASE_MATHFN_FLOATN(MATHFN) \
1861 CASE_CFN_##MATHFN: \
1862 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1863 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1864 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1865 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1866 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1867 break;
1868 /* Similar to above, but appends _R after any F/L suffix. */
1869 #define CASE_MATHFN_REENT(MATHFN) \
1870 case CFN_BUILT_IN_##MATHFN##_R: \
1871 case CFN_BUILT_IN_##MATHFN##F_R: \
1872 case CFN_BUILT_IN_##MATHFN##L_R: \
1873 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1874 fcodel = BUILT_IN_##MATHFN##L_R ; break;
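/* For reference, a rough sketch of what one of these macros expands to:

     CASE_MATHFN (ACOS)

   becomes approximately

     case CFN_BUILT_IN_ACOS:
     case CFN_BUILT_IN_ACOSF:
     case CFN_BUILT_IN_ACOSL:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   with the case labels supplied by the generated CASE_CFN_ACOS macro
   from case-cfn-macros.h (which also covers an internal-function
   variant when one exists).  */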
1876 /* Return a function equivalent to FN but operating on floating-point
1877 values of type TYPE, or END_BUILTINS if no such function exists.
1878 This is purely an operation on function codes; it does not guarantee
1879 that the target actually has an implementation of the function. */
1881 static built_in_function
1882 mathfn_built_in_2 (tree type, combined_fn fn)
1884 tree mtype;
1885 built_in_function fcode, fcodef, fcodel;
1886 built_in_function fcodef16 = END_BUILTINS;
1887 built_in_function fcodef32 = END_BUILTINS;
1888 built_in_function fcodef64 = END_BUILTINS;
1889 built_in_function fcodef128 = END_BUILTINS;
1890 built_in_function fcodef32x = END_BUILTINS;
1891 built_in_function fcodef64x = END_BUILTINS;
1892 built_in_function fcodef128x = END_BUILTINS;
1894 switch (fn)
1896 CASE_MATHFN (ACOS)
1897 CASE_MATHFN (ACOSH)
1898 CASE_MATHFN (ASIN)
1899 CASE_MATHFN (ASINH)
1900 CASE_MATHFN (ATAN)
1901 CASE_MATHFN (ATAN2)
1902 CASE_MATHFN (ATANH)
1903 CASE_MATHFN (CBRT)
1904 CASE_MATHFN_FLOATN (CEIL)
1905 CASE_MATHFN (CEXPI)
1906 CASE_MATHFN_FLOATN (COPYSIGN)
1907 CASE_MATHFN (COS)
1908 CASE_MATHFN (COSH)
1909 CASE_MATHFN (DREM)
1910 CASE_MATHFN (ERF)
1911 CASE_MATHFN (ERFC)
1912 CASE_MATHFN (EXP)
1913 CASE_MATHFN (EXP10)
1914 CASE_MATHFN (EXP2)
1915 CASE_MATHFN (EXPM1)
1916 CASE_MATHFN (FABS)
1917 CASE_MATHFN (FDIM)
1918 CASE_MATHFN_FLOATN (FLOOR)
1919 CASE_MATHFN_FLOATN (FMA)
1920 CASE_MATHFN_FLOATN (FMAX)
1921 CASE_MATHFN_FLOATN (FMIN)
1922 CASE_MATHFN (FMOD)
1923 CASE_MATHFN (FREXP)
1924 CASE_MATHFN (GAMMA)
1925 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1926 CASE_MATHFN (HUGE_VAL)
1927 CASE_MATHFN (HYPOT)
1928 CASE_MATHFN (ILOGB)
1929 CASE_MATHFN (ICEIL)
1930 CASE_MATHFN (IFLOOR)
1931 CASE_MATHFN (INF)
1932 CASE_MATHFN (IRINT)
1933 CASE_MATHFN (IROUND)
1934 CASE_MATHFN (ISINF)
1935 CASE_MATHFN (J0)
1936 CASE_MATHFN (J1)
1937 CASE_MATHFN (JN)
1938 CASE_MATHFN (LCEIL)
1939 CASE_MATHFN (LDEXP)
1940 CASE_MATHFN (LFLOOR)
1941 CASE_MATHFN (LGAMMA)
1942 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1943 CASE_MATHFN (LLCEIL)
1944 CASE_MATHFN (LLFLOOR)
1945 CASE_MATHFN (LLRINT)
1946 CASE_MATHFN (LLROUND)
1947 CASE_MATHFN (LOG)
1948 CASE_MATHFN (LOG10)
1949 CASE_MATHFN (LOG1P)
1950 CASE_MATHFN (LOG2)
1951 CASE_MATHFN (LOGB)
1952 CASE_MATHFN (LRINT)
1953 CASE_MATHFN (LROUND)
1954 CASE_MATHFN (MODF)
1955 CASE_MATHFN (NAN)
1956 CASE_MATHFN (NANS)
1957 CASE_MATHFN_FLOATN (NEARBYINT)
1958 CASE_MATHFN (NEXTAFTER)
1959 CASE_MATHFN (NEXTTOWARD)
1960 CASE_MATHFN (POW)
1961 CASE_MATHFN (POWI)
1962 CASE_MATHFN (POW10)
1963 CASE_MATHFN (REMAINDER)
1964 CASE_MATHFN (REMQUO)
1965 CASE_MATHFN_FLOATN (RINT)
1966 CASE_MATHFN_FLOATN (ROUND)
1967 CASE_MATHFN (SCALB)
1968 CASE_MATHFN (SCALBLN)
1969 CASE_MATHFN (SCALBN)
1970 CASE_MATHFN (SIGNBIT)
1971 CASE_MATHFN (SIGNIFICAND)
1972 CASE_MATHFN (SIN)
1973 CASE_MATHFN (SINCOS)
1974 CASE_MATHFN (SINH)
1975 CASE_MATHFN_FLOATN (SQRT)
1976 CASE_MATHFN (TAN)
1977 CASE_MATHFN (TANH)
1978 CASE_MATHFN (TGAMMA)
1979 CASE_MATHFN_FLOATN (TRUNC)
1980 CASE_MATHFN (Y0)
1981 CASE_MATHFN (Y1)
1982 CASE_MATHFN (YN)
1984 default:
1985 return END_BUILTINS;
1988 mtype = TYPE_MAIN_VARIANT (type);
1989 if (mtype == double_type_node)
1990 return fcode;
1991 else if (mtype == float_type_node)
1992 return fcodef;
1993 else if (mtype == long_double_type_node)
1994 return fcodel;
1995 else if (mtype == float16_type_node)
1996 return fcodef16;
1997 else if (mtype == float32_type_node)
1998 return fcodef32;
1999 else if (mtype == float64_type_node)
2000 return fcodef64;
2001 else if (mtype == float128_type_node)
2002 return fcodef128;
2003 else if (mtype == float32x_type_node)
2004 return fcodef32x;
2005 else if (mtype == float64x_type_node)
2006 return fcodef64x;
2007 else if (mtype == float128x_type_node)
2008 return fcodef128x;
2009 else
2010 return END_BUILTINS;
2013 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2014 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2015 otherwise use the explicit declaration. If we can't do the conversion,
2016 return null. */
2018 static tree
2019 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2021 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2022 if (fcode2 == END_BUILTINS)
2023 return NULL_TREE;
2025 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2026 return NULL_TREE;
2028 return builtin_decl_explicit (fcode2);
2031 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2033 tree
2034 mathfn_built_in (tree type, combined_fn fn)
2036 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2039 /* Like mathfn_built_in_1, but take a built_in_function and
2040 always use the implicit builtin declarations. */
2042 tree
2043 mathfn_built_in (tree type, enum built_in_function fn)
2045 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
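/* Illustrative example: asking for the float variant of a double
   function code:

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   returns the implicit declaration of sinf, or NULL_TREE when no such
   implicit builtin is available (e.g. on targets without C99 math).  */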
2048 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2049 return its code, otherwise return IFN_LAST. Note that this function
2050 only tests whether the function is defined in internal-fn.def, not whether
2051 it is actually available on the target. */
2053 internal_fn
2054 associated_internal_fn (tree fndecl)
2056 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2057 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2058 switch (DECL_FUNCTION_CODE (fndecl))
2060 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2061 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2062 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2063 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2064 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2065 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2066 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2067 #include "internal-fn.def"
2069 CASE_FLT_FN (BUILT_IN_POW10):
2070 return IFN_EXP10;
2072 CASE_FLT_FN (BUILT_IN_DREM):
2073 return IFN_REMAINDER;
2075 CASE_FLT_FN (BUILT_IN_SCALBN):
2076 CASE_FLT_FN (BUILT_IN_SCALBLN):
2077 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2078 return IFN_LDEXP;
2079 return IFN_LAST;
2081 default:
2082 return IFN_LAST;
2086 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2087 on the current target by a call to an internal function, return the
2088 code of that internal function, otherwise return IFN_LAST. The caller
2089 is responsible for ensuring that any side-effects of the built-in
2090 call are dealt with correctly. E.g. if CALL sets errno, the caller
2091 must decide that the errno result isn't needed or make it available
2092 in some other way. */
2094 internal_fn
2095 replacement_internal_fn (gcall *call)
2097 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2099 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2100 if (ifn != IFN_LAST)
2102 tree_pair types = direct_internal_fn_types (ifn, call);
2103 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2104 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2105 return ifn;
2108 return IFN_LAST;
2111 /* Expand a call to the builtin ternary math functions (fma).
2112 Return NULL_RTX if a normal call should be emitted rather than expanding the
2113 function in-line. EXP is the expression that is a call to the builtin
2114 function; if convenient, the result should be placed in TARGET.
2115 SUBTARGET may be used as the target for computing one of EXP's
2116 operands. */
2118 static rtx
2119 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2121 optab builtin_optab;
2122 rtx op0, op1, op2, result;
2123 rtx_insn *insns;
2124 tree fndecl = get_callee_fndecl (exp);
2125 tree arg0, arg1, arg2;
2126 machine_mode mode;
2128 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2129 return NULL_RTX;
2131 arg0 = CALL_EXPR_ARG (exp, 0);
2132 arg1 = CALL_EXPR_ARG (exp, 1);
2133 arg2 = CALL_EXPR_ARG (exp, 2);
2135 switch (DECL_FUNCTION_CODE (fndecl))
2137 CASE_FLT_FN (BUILT_IN_FMA):
2138 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2139 builtin_optab = fma_optab; break;
2140 default:
2141 gcc_unreachable ();
2144 /* Make a suitable register to place result in. */
2145 mode = TYPE_MODE (TREE_TYPE (exp));
2147 /* Before working hard, check whether the instruction is available. */
2148 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2149 return NULL_RTX;
2151 result = gen_reg_rtx (mode);
2153 /* Always stabilize the argument list. */
2154 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2155 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2156 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2158 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2159 op1 = expand_normal (arg1);
2160 op2 = expand_normal (arg2);
2162 start_sequence ();
2164 /* Compute into RESULT.
2165 Set RESULT to wherever the result comes back. */
2166 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2167 result, 0);
2169 /* If we were unable to expand via the builtin, stop the sequence
2170 (without outputting the insns) and call to the library function
2171 with the stabilized argument list. */
2172 if (result == 0)
2174 end_sequence ();
2175 return expand_call (exp, target, target == const0_rtx);
2178 /* Output the entire sequence. */
2179 insns = get_insns ();
2180 end_sequence ();
2181 emit_insn (insns);
2183 return result;
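/* Illustrative example: on a target whose fma_optab has a handler for
   the mode (typically hardware fused multiply-add), a call such as

     double d = __builtin_fma (a, b, c);

   is expanded inline by the function above; otherwise it returns
   NULL_RTX and a normal library call to fma is emitted instead.  */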
2186 /* Expand a call to the builtin sin and cos math functions.
2187 Return NULL_RTX if a normal call should be emitted rather than expanding the
2188 function in-line. EXP is the expression that is a call to the builtin
2189 function; if convenient, the result should be placed in TARGET.
2190 SUBTARGET may be used as the target for computing one of EXP's
2191 operands. */
2193 static rtx
2194 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2196 optab builtin_optab;
2197 rtx op0;
2198 rtx_insn *insns;
2199 tree fndecl = get_callee_fndecl (exp);
2200 machine_mode mode;
2201 tree arg;
2203 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2204 return NULL_RTX;
2206 arg = CALL_EXPR_ARG (exp, 0);
2208 switch (DECL_FUNCTION_CODE (fndecl))
2210 CASE_FLT_FN (BUILT_IN_SIN):
2211 CASE_FLT_FN (BUILT_IN_COS):
2212 builtin_optab = sincos_optab; break;
2213 default:
2214 gcc_unreachable ();
2217 /* Make a suitable register to place result in. */
2218 mode = TYPE_MODE (TREE_TYPE (exp));
2220 /* Check if sincos insn is available, otherwise fall back
2221 to sin or cos insn. */
2222 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2223 switch (DECL_FUNCTION_CODE (fndecl))
2225 CASE_FLT_FN (BUILT_IN_SIN):
2226 builtin_optab = sin_optab; break;
2227 CASE_FLT_FN (BUILT_IN_COS):
2228 builtin_optab = cos_optab; break;
2229 default:
2230 gcc_unreachable ();
2233 /* Before working hard, check whether the instruction is available. */
2234 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2236 rtx result = gen_reg_rtx (mode);
2238 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2239 need to expand the argument again. This way, we will not perform
2240 side effects more than once. */
2241 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2243 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2245 start_sequence ();
2247 /* Compute into RESULT.
2248 Set RESULT to wherever the result comes back. */
2249 if (builtin_optab == sincos_optab)
2251 int ok;
2253 switch (DECL_FUNCTION_CODE (fndecl))
2255 CASE_FLT_FN (BUILT_IN_SIN):
2256 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2257 break;
2258 CASE_FLT_FN (BUILT_IN_COS):
2259 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2260 break;
2261 default:
2262 gcc_unreachable ();
2264 gcc_assert (ok);
2266 else
2267 result = expand_unop (mode, builtin_optab, op0, result, 0);
2269 if (result != 0)
2271 /* Output the entire sequence. */
2272 insns = get_insns ();
2273 end_sequence ();
2274 emit_insn (insns);
2275 return result;
2278 /* If we were unable to expand via the builtin, stop the sequence
2279 (without outputting the insns) and call to the library function
2280 with the stabilized argument list. */
2281 end_sequence ();
2284 return expand_call (exp, target, target == const0_rtx);
2287 /* Given an interclass math builtin decl FNDECL and its argument ARG
2288 return an RTL instruction code that implements the functionality.
2289 If that isn't possible or available return CODE_FOR_nothing. */
2291 static enum insn_code
2292 interclass_mathfn_icode (tree arg, tree fndecl)
2294 bool errno_set = false;
2295 optab builtin_optab = unknown_optab;
2296 machine_mode mode;
2298 switch (DECL_FUNCTION_CODE (fndecl))
2300 CASE_FLT_FN (BUILT_IN_ILOGB):
2301 errno_set = true; builtin_optab = ilogb_optab; break;
2302 CASE_FLT_FN (BUILT_IN_ISINF):
2303 builtin_optab = isinf_optab; break;
2304 case BUILT_IN_ISNORMAL:
2305 case BUILT_IN_ISFINITE:
2306 CASE_FLT_FN (BUILT_IN_FINITE):
2307 case BUILT_IN_FINITED32:
2308 case BUILT_IN_FINITED64:
2309 case BUILT_IN_FINITED128:
2310 case BUILT_IN_ISINFD32:
2311 case BUILT_IN_ISINFD64:
2312 case BUILT_IN_ISINFD128:
2313 /* These builtins have no optabs (yet). */
2314 break;
2315 default:
2316 gcc_unreachable ();
2319 /* There's no easy way to detect the case we need to set EDOM. */
2320 if (flag_errno_math && errno_set)
2321 return CODE_FOR_nothing;
2323 /* Optab mode depends on the mode of the input argument. */
2324 mode = TYPE_MODE (TREE_TYPE (arg));
2326 if (builtin_optab)
2327 return optab_handler (builtin_optab, mode);
2328 return CODE_FOR_nothing;
2331 /* Expand a call to one of the builtin math functions that operate on
2332 floating point argument and output an integer result (ilogb, isinf,
2333 isnan, etc).
2334 Return 0 if a normal call should be emitted rather than expanding the
2335 function in-line. EXP is the expression that is a call to the builtin
2336 function; if convenient, the result should be placed in TARGET. */
2338 static rtx
2339 expand_builtin_interclass_mathfn (tree exp, rtx target)
2341 enum insn_code icode = CODE_FOR_nothing;
2342 rtx op0;
2343 tree fndecl = get_callee_fndecl (exp);
2344 machine_mode mode;
2345 tree arg;
2347 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2348 return NULL_RTX;
2350 arg = CALL_EXPR_ARG (exp, 0);
2351 icode = interclass_mathfn_icode (arg, fndecl);
2352 mode = TYPE_MODE (TREE_TYPE (arg));
2354 if (icode != CODE_FOR_nothing)
2356 struct expand_operand ops[1];
2357 rtx_insn *last = get_last_insn ();
2358 tree orig_arg = arg;
2360 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2361 need to expand the argument again. This way, we will not perform
2362 side effects more than once. */
2363 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2365 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2367 if (mode != GET_MODE (op0))
2368 op0 = convert_to_mode (mode, op0, 0);
2370 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2371 if (maybe_legitimize_operands (icode, 0, 1, ops)
2372 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2373 return ops[0].value;
2375 delete_insns_since (last);
2376 CALL_EXPR_ARG (exp, 0) = orig_arg;
2379 return NULL_RTX;
2382 /* Expand a call to the builtin sincos math function.
2383 Return NULL_RTX if a normal call should be emitted rather than expanding the
2384 function in-line. EXP is the expression that is a call to the builtin
2385 function. */
2387 static rtx
2388 expand_builtin_sincos (tree exp)
2390 rtx op0, op1, op2, target1, target2;
2391 machine_mode mode;
2392 tree arg, sinp, cosp;
2393 int result;
2394 location_t loc = EXPR_LOCATION (exp);
2395 tree alias_type, alias_off;
2397 if (!validate_arglist (exp, REAL_TYPE,
2398 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2399 return NULL_RTX;
2401 arg = CALL_EXPR_ARG (exp, 0);
2402 sinp = CALL_EXPR_ARG (exp, 1);
2403 cosp = CALL_EXPR_ARG (exp, 2);
2405 /* Make a suitable register to place result in. */
2406 mode = TYPE_MODE (TREE_TYPE (arg));
2408 /* Check if sincos insn is available, otherwise emit the call. */
2409 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2410 return NULL_RTX;
2412 target1 = gen_reg_rtx (mode);
2413 target2 = gen_reg_rtx (mode);
2415 op0 = expand_normal (arg);
2416 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2417 alias_off = build_int_cst (alias_type, 0);
2418 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2419 sinp, alias_off));
2420 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2421 cosp, alias_off));
2423 /* Compute into target1 and target2.
2424 Set TARGET to wherever the result comes back. */
2425 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2426 gcc_assert (result);
2428 /* Move target1 and target2 to the memory locations indicated
2429 by op1 and op2. */
2430 emit_move_insn (op1, target1);
2431 emit_move_insn (op2, target2);
2433 return const0_rtx;
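/* Illustrative example: the expander above handles the GNU sincos
   extension,

     double s, c;
     sincos (x, &s, &c);

   When the target has a sincos insn, both results come from a single
   pattern; the two emit_move_insn calls then store them through the
   user-supplied pointers.  Without the insn, NULL_RTX is returned and
   the library sincos is called instead.  */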
2436 /* Expand a call to the internal cexpi builtin to the sincos math function.
2437 EXP is the expression that is a call to the builtin function; if convenient,
2438 the result should be placed in TARGET. */
2440 static rtx
2441 expand_builtin_cexpi (tree exp, rtx target)
2443 tree fndecl = get_callee_fndecl (exp);
2444 tree arg, type;
2445 machine_mode mode;
2446 rtx op0, op1, op2;
2447 location_t loc = EXPR_LOCATION (exp);
2449 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2450 return NULL_RTX;
2452 arg = CALL_EXPR_ARG (exp, 0);
2453 type = TREE_TYPE (arg);
2454 mode = TYPE_MODE (TREE_TYPE (arg));
2456 /* Try expanding via a sincos optab, fall back to emitting a libcall
2457 to sincos or cexp. We are sure one of them exists because cexpi
2458 is only generated from sincos or cexp, or when either is available. */
2459 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2461 op1 = gen_reg_rtx (mode);
2462 op2 = gen_reg_rtx (mode);
2464 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466 /* Compute into op1 and op2. */
2467 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2469 else if (targetm.libc_has_function (function_sincos))
2471 tree call, fn = NULL_TREE;
2472 tree top1, top2;
2473 rtx op1a, op2a;
2475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2476 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2478 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2480 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2481 else
2482 gcc_unreachable ();
2484 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2485 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2486 op1a = copy_addr_to_reg (XEXP (op1, 0));
2487 op2a = copy_addr_to_reg (XEXP (op2, 0));
2488 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2489 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2491 /* Make sure not to fold the sincos call again. */
2492 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2493 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2494 call, 3, arg, top1, top2));
2496 else
2498 tree call, fn = NULL_TREE, narg;
2499 tree ctype = build_complex_type (type);
2501 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2502 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2504 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2506 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2507 else
2508 gcc_unreachable ();
2510 /* If we don't have a decl for cexp create one. This is the
2511 friendliest fallback if the user calls __builtin_cexpi
2512 without full target C99 function support. */
2513 if (fn == NULL_TREE)
2515 tree fntype;
2516 const char *name = NULL;
2518 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2519 name = "cexpf";
2520 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2521 name = "cexp";
2522 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2523 name = "cexpl";
2525 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2526 fn = build_fn_decl (name, fntype);
2529 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2530 build_real (type, dconst0), arg);
2532 /* Make sure not to fold the cexp call again. */
2533 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2534 return expand_expr (build_call_nary (ctype, call, 1, narg),
2535 target, VOIDmode, EXPAND_NORMAL);
2538 /* Now build the proper return type. */
2539 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2540 make_tree (TREE_TYPE (arg), op2),
2541 make_tree (TREE_TYPE (arg), op1)),
2542 target, VOIDmode, EXPAND_NORMAL);
2545 /* Conveniently construct a function call expression. FNDECL names the
2546 function to be called, N is the number of arguments, and the "..."
2547 parameters are the argument expressions. Unlike build_call_expr
2548 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2550 static tree
2551 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2553 va_list ap;
2554 tree fntype = TREE_TYPE (fndecl);
2555 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2557 va_start (ap, n);
2558 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2559 va_end (ap);
2560 SET_EXPR_LOCATION (fn, loc);
2561 return fn;
2564 /* Expand a call to one of the builtin rounding functions gcc defines
2565 as an extension (lfloor and lceil). As these are gcc extensions we
2566 do not need to worry about setting errno to EDOM.
2567 If expanding via optab fails, lower expression to (int)(floor(x)).
2568 EXP is the expression that is a call to the builtin function;
2569 if convenient, the result should be placed in TARGET. */
2571 static rtx
2572 expand_builtin_int_roundingfn (tree exp, rtx target)
2574 convert_optab builtin_optab;
2575 rtx op0, tmp;
2576 rtx_insn *insns;
2577 tree fndecl = get_callee_fndecl (exp);
2578 enum built_in_function fallback_fn;
2579 tree fallback_fndecl;
2580 machine_mode mode;
2581 tree arg;
2583 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2584 gcc_unreachable ();
2586 arg = CALL_EXPR_ARG (exp, 0);
2588 switch (DECL_FUNCTION_CODE (fndecl))
2590 CASE_FLT_FN (BUILT_IN_ICEIL):
2591 CASE_FLT_FN (BUILT_IN_LCEIL):
2592 CASE_FLT_FN (BUILT_IN_LLCEIL):
2593 builtin_optab = lceil_optab;
2594 fallback_fn = BUILT_IN_CEIL;
2595 break;
2597 CASE_FLT_FN (BUILT_IN_IFLOOR):
2598 CASE_FLT_FN (BUILT_IN_LFLOOR):
2599 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2600 builtin_optab = lfloor_optab;
2601 fallback_fn = BUILT_IN_FLOOR;
2602 break;
2604 default:
2605 gcc_unreachable ();
2608 /* Make a suitable register to place result in. */
2609 mode = TYPE_MODE (TREE_TYPE (exp));
2611 target = gen_reg_rtx (mode);
2613 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2614 need to expand the argument again. This way, we will not perform
2615 side effects more than once. */
2616 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2618 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2620 start_sequence ();
2622 /* Compute into TARGET. */
2623 if (expand_sfix_optab (target, op0, builtin_optab))
2625 /* Output the entire sequence. */
2626 insns = get_insns ();
2627 end_sequence ();
2628 emit_insn (insns);
2629 return target;
2632 /* If we were unable to expand via the builtin, stop the sequence
2633 (without outputting the insns). */
2634 end_sequence ();
2636 /* Fall back to floating point rounding optab. */
2637 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2639 /* For non-C99 targets we may end up without a fallback fndecl here
2640 if the user called __builtin_lfloor directly. In this case emit
2641 a call to the floor/ceil variants nevertheless. This should result
2642 in the best user experience for targets without full C99 support. */
2643 if (fallback_fndecl == NULL_TREE)
2645 tree fntype;
2646 const char *name = NULL;
2648 switch (DECL_FUNCTION_CODE (fndecl))
2650 case BUILT_IN_ICEIL:
2651 case BUILT_IN_LCEIL:
2652 case BUILT_IN_LLCEIL:
2653 name = "ceil";
2654 break;
2655 case BUILT_IN_ICEILF:
2656 case BUILT_IN_LCEILF:
2657 case BUILT_IN_LLCEILF:
2658 name = "ceilf";
2659 break;
2660 case BUILT_IN_ICEILL:
2661 case BUILT_IN_LCEILL:
2662 case BUILT_IN_LLCEILL:
2663 name = "ceill";
2664 break;
2665 case BUILT_IN_IFLOOR:
2666 case BUILT_IN_LFLOOR:
2667 case BUILT_IN_LLFLOOR:
2668 name = "floor";
2669 break;
2670 case BUILT_IN_IFLOORF:
2671 case BUILT_IN_LFLOORF:
2672 case BUILT_IN_LLFLOORF:
2673 name = "floorf";
2674 break;
2675 case BUILT_IN_IFLOORL:
2676 case BUILT_IN_LFLOORL:
2677 case BUILT_IN_LLFLOORL:
2678 name = "floorl";
2679 break;
2680 default:
2681 gcc_unreachable ();
2684 fntype = build_function_type_list (TREE_TYPE (arg),
2685 TREE_TYPE (arg), NULL_TREE);
2686 fallback_fndecl = build_fn_decl (name, fntype);
2689 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2691 tmp = expand_normal (exp);
2692 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2694 /* Truncate the result of the floating-point optab to an integer
2695 via expand_fix (). */
2696 target = gen_reg_rtx (mode);
2697 expand_fix (target, tmp, 0);
2699 return target;
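/* Illustrative example: on a target without an lceil/lfloor insn, a
   call such as

     long l = __builtin_lfloor (x);

   is lowered by the fallback path above to, in effect,

     long l = (long) floor (x);

   i.e. a call to the floor variant followed by expand_fix.  */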
2702 /* Expand a call to one of the builtin math functions doing integer
2703 conversion (lrint).
2704 Return 0 if a normal call should be emitted rather than expanding the
2705 function in-line. EXP is the expression that is a call to the builtin
2706 function; if convenient, the result should be placed in TARGET. */
2708 static rtx
2709 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2711 convert_optab builtin_optab;
2712 rtx op0;
2713 rtx_insn *insns;
2714 tree fndecl = get_callee_fndecl (exp);
2715 tree arg;
2716 machine_mode mode;
2717 enum built_in_function fallback_fn = BUILT_IN_NONE;
2719 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2720 gcc_unreachable ();
2722 arg = CALL_EXPR_ARG (exp, 0);
2724 switch (DECL_FUNCTION_CODE (fndecl))
2726 CASE_FLT_FN (BUILT_IN_IRINT):
2727 fallback_fn = BUILT_IN_LRINT;
2728 gcc_fallthrough ();
2729 CASE_FLT_FN (BUILT_IN_LRINT):
2730 CASE_FLT_FN (BUILT_IN_LLRINT):
2731 builtin_optab = lrint_optab;
2732 break;
2734 CASE_FLT_FN (BUILT_IN_IROUND):
2735 fallback_fn = BUILT_IN_LROUND;
2736 gcc_fallthrough ();
2737 CASE_FLT_FN (BUILT_IN_LROUND):
2738 CASE_FLT_FN (BUILT_IN_LLROUND):
2739 builtin_optab = lround_optab;
2740 break;
2742 default:
2743 gcc_unreachable ();
2746 /* There's no easy way to detect the case we need to set EDOM. */
2747 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2748 return NULL_RTX;
2750 /* Make a suitable register to place result in. */
2751 mode = TYPE_MODE (TREE_TYPE (exp));
2753 /* There's no easy way to detect the case we need to set EDOM. */
2754 if (!flag_errno_math)
2756 rtx result = gen_reg_rtx (mode);
2758 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2759 need to expand the argument again. This way, we will not perform
2760 side effects more than once. */
2761 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2763 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2765 start_sequence ();
2767 if (expand_sfix_optab (result, op0, builtin_optab))
2769 /* Output the entire sequence. */
2770 insns = get_insns ();
2771 end_sequence ();
2772 emit_insn (insns);
2773 return result;
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns) and call to the library function
2778 with the stabilized argument list. */
2779 end_sequence ();
2782 if (fallback_fn != BUILT_IN_NONE)
2784 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2785 targets, (int) round (x) should never be transformed into
2786 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2787 a call to lround in the hope that the target provides at least some
2788 C99 functions. This should result in the best user experience for
2789 targets without full C99 support. */
2790 tree fallback_fndecl = mathfn_built_in_1
2791 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2793 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2794 fallback_fndecl, 1, arg);
2796 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2797 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2798 return convert_to_mode (mode, target, 0);
2801 return expand_call (exp, target, target == const0_rtx);
2804 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2805 a normal call should be emitted rather than expanding the function
2806 in-line. EXP is the expression that is a call to the builtin
2807 function; if convenient, the result should be placed in TARGET. */
2809 static rtx
2810 expand_builtin_powi (tree exp, rtx target)
2812 tree arg0, arg1;
2813 rtx op0, op1;
2814 machine_mode mode;
2815 machine_mode mode2;
2817 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2818 return NULL_RTX;
2820 arg0 = CALL_EXPR_ARG (exp, 0);
2821 arg1 = CALL_EXPR_ARG (exp, 1);
2822 mode = TYPE_MODE (TREE_TYPE (exp));
2824 /* Emit a libcall to libgcc. */
2826 /* Mode of the 2nd argument must match that of an int. */
2827 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2829 if (target == NULL_RTX)
2830 target = gen_reg_rtx (mode);
2832 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2833 if (GET_MODE (op0) != mode)
2834 op0 = convert_to_mode (mode, op0, 0);
2835 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2836 if (GET_MODE (op1) != mode2)
2837 op1 = convert_to_mode (mode2, op1, 0);
2839 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2840 target, LCT_CONST, mode,
2841 op0, mode, op1, mode2);
2843 return target;
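/* Illustrative example: the libcall above resolves to the libgcc
   helpers __powisf2/__powidf2/__powixf2, so

     double d = __builtin_powi (x, n);

   behaves like d = __powidf2 (x, n) for double, computing x**n by
   repeated squaring.  */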
2846 /* Expand expression EXP which is a call to the strlen builtin. Return
2847 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2848 try to get the result in TARGET, if convenient. */
2850 static rtx
2851 expand_builtin_strlen (tree exp, rtx target,
2852 machine_mode target_mode)
2854 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2855 return NULL_RTX;
2857 struct expand_operand ops[4];
2858 rtx pat;
2859 tree len;
2860 tree src = CALL_EXPR_ARG (exp, 0);
2861 rtx src_reg;
2862 rtx_insn *before_strlen;
2863 machine_mode insn_mode;
2864 enum insn_code icode = CODE_FOR_nothing;
2865 unsigned int align;
2867 /* If the length can be computed at compile-time, return it. */
2868 len = c_strlen (src, 0);
2869 if (len)
2870 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2872 /* If the length can be computed at compile-time and is constant
2873 integer, but there are side-effects in src, evaluate
2874 src for side-effects, then return len.
2875 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2876 can be optimized into: i++; x = 3; */
2877 len = c_strlen (src, 1);
2878 if (len && TREE_CODE (len) == INTEGER_CST)
2880 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2881 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2884 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2886 /* If SRC is not a pointer type, don't do this operation inline. */
2887 if (align == 0)
2888 return NULL_RTX;
2890 /* Bail out if we can't compute strlen in the right mode. */
2891 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2893 icode = optab_handler (strlen_optab, insn_mode);
2894 if (icode != CODE_FOR_nothing)
2895 break;
2897 if (insn_mode == VOIDmode)
2898 return NULL_RTX;
2900 /* Make a place to hold the source address. We will not expand
2901 the actual source until we are sure that the expansion will
2902 not fail -- there are trees that cannot be expanded twice. */
2903 src_reg = gen_reg_rtx (Pmode);
2905 /* Mark the beginning of the strlen sequence so we can emit the
2906 source operand later. */
2907 before_strlen = get_last_insn ();
2909 create_output_operand (&ops[0], target, insn_mode);
2910 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2911 create_integer_operand (&ops[2], 0);
2912 create_integer_operand (&ops[3], align);
2913 if (!maybe_expand_insn (icode, 4, ops))
2914 return NULL_RTX;
2916 /* Check to see if the argument was declared attribute nonstring
2917 and if so, issue a warning since at this point it's not known
2918 to be nul-terminated. */
2919 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2921 /* Now that we are assured of success, expand the source. */
2922 start_sequence ();
2923 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2924 if (pat != src_reg)
2926 #ifdef POINTERS_EXTEND_UNSIGNED
2927 if (GET_MODE (pat) != Pmode)
2928 pat = convert_to_mode (Pmode, pat,
2929 POINTERS_EXTEND_UNSIGNED);
2930 #endif
2931 emit_move_insn (src_reg, pat);
2933 pat = get_insns ();
2934 end_sequence ();
2936 if (before_strlen)
2937 emit_insn_after (pat, before_strlen);
2938 else
2939 emit_insn_before (pat, get_insns ());
2941 /* Return the value in the proper mode for this function. */
2942 if (GET_MODE (ops[0].value) == target_mode)
2943 target = ops[0].value;
2944 else if (target != 0)
2945 convert_move (target, ops[0].value, 0);
2946 else
2947 target = convert_to_mode (target_mode, ops[0].value, 0);
2949 return target;
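/* Illustrative example: both compile-time paths above in action,
   reusing the example from the comment:

     size_t a = strlen ("hello");                  // folded to 5
     size_t b = strlen (i++ ? "xfoo" + 1 : "bar"); // i++; b = 3;

   Only when c_strlen fails on both attempts does the expander fall
   through to the target's strlen insn, and failing that, to a library
   call.  */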
2952 /* Expand call EXP to the strnlen built-in, returning the result
2953 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2955 static rtx
2956 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2958 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2961 tree src = CALL_EXPR_ARG (exp, 0);
2962 tree bound = CALL_EXPR_ARG (exp, 1);
2964 if (!bound)
2965 return NULL_RTX;
2967 location_t loc = UNKNOWN_LOCATION;
2968 if (EXPR_HAS_LOCATION (exp))
2969 loc = EXPR_LOCATION (exp);
2971 tree maxobjsize = max_object_size ();
2972 tree func = get_callee_fndecl (exp);
2974 tree len = c_strlen (src, 0);
2976 if (TREE_CODE (bound) == INTEGER_CST)
2978 if (!TREE_NO_WARNING (exp)
2979 && tree_int_cst_lt (maxobjsize, bound)
2980 && warning_at (loc, OPT_Wstringop_overflow_,
2981 "%K%qD specified bound %E "
2982 "exceeds maximum object size %E",
2983 exp, func, bound, maxobjsize))
2984 TREE_NO_WARNING (exp) = true;
2986 if (!len || TREE_CODE (len) != INTEGER_CST)
2987 return NULL_RTX;
2989 len = fold_convert_loc (loc, size_type_node, len);
2990 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2991 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2994 if (TREE_CODE (bound) != SSA_NAME)
2995 return NULL_RTX;
2997 wide_int min, max;
2998 enum value_range_type rng = get_range_info (bound, &min, &max);
2999 if (rng != VR_RANGE)
3000 return NULL_RTX;
3002 if (!TREE_NO_WARNING (exp)
3003 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3004 && warning_at (loc, OPT_Wstringop_overflow_,
3005 "%K%qD specified bound [%wu, %wu] "
3006 "exceeds maximum object size %E",
3007 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3008 TREE_NO_WARNING (exp) = true;
3010 if (!len || TREE_CODE (len) != INTEGER_CST)
3011 return NULL_RTX;
3013 if (wi::gtu_p (min, wi::to_wide (len)))
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
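/* Illustrative example: the constant-bound diagnostic above triggers
   for code like

     char buf[8];
     size_t n = __builtin_strnlen (buf, (size_t) -1);

   where the bound exceeds PTRDIFF_MAX (the maximum object size),
   producing a -Wstringop-overflow warning of the form
   "specified bound ... exceeds maximum object size ...".  */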
3020 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3021 bytes from constant string DATA + OFFSET and return it as target
3022 constant. */
3024 static rtx
3025 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3026 scalar_int_mode mode)
3028 const char *str = (const char *) data;
3030 gcc_assert (offset >= 0
3031 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3032 <= strlen (str) + 1));
3034 return c_readstr (str + offset, mode);
3037 /* LEN specifies the length of the block for the memcpy/memset operation.
3038 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3039 In some cases we can make a very likely guess about the maximum size,
3040 which we then store in PROBABLE_MAX_SIZE. */
3042 static void
3043 determine_block_size (tree len, rtx len_rtx,
3044 unsigned HOST_WIDE_INT *min_size,
3045 unsigned HOST_WIDE_INT *max_size,
3046 unsigned HOST_WIDE_INT *probable_max_size)
3048 if (CONST_INT_P (len_rtx))
3050 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3051 return;
3053 else
3055 wide_int min, max;
3056 enum value_range_type range_type = VR_UNDEFINED;
3058 /* Determine bounds from the type. */
3059 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3060 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3061 else
3062 *min_size = 0;
3063 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3064 *probable_max_size = *max_size
3065 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3066 else
3067 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3069 if (TREE_CODE (len) == SSA_NAME)
3070 range_type = get_range_info (len, &min, &max);
3071 if (range_type == VR_RANGE)
3073 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3074 *min_size = min.to_uhwi ();
3075 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3076 *probable_max_size = *max_size = max.to_uhwi ();
3078 else if (range_type == VR_ANTI_RANGE)
3080 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3081 if (min == 0)
3083 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3084 *min_size = max.to_uhwi () + 1;
3086 /* Code like
3088 int n;
3089 if (n < 100)
3090 memcpy (a, b, n)
3092 produces an anti-range that allows negative values of N. We can
3093 still use that information and guess that N is not negative. */
3095 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3096 *probable_max_size = min.to_uhwi () - 1;
3099 gcc_checking_assert (*max_size <=
3100 (unsigned HOST_WIDE_INT)
3101 GET_MODE_MASK (GET_MODE (len_rtx)));
3104 /* Try to verify that the sizes and lengths of the arguments to a string
3105 manipulation function given by EXP are within valid bounds and that
3106 the operation does not lead to buffer overflow or read past the end.
3107 Arguments other than EXP may be null. When non-null, the arguments
3108 have the following meaning:
3109 DST is the destination of a copy call or NULL otherwise.
3110 SRC is the source of a copy call or NULL otherwise.
3111 DSTWRITE is the number of bytes written into the destination obtained
3112 from the user-supplied size argument to the function (such as in
3113 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3114 MAXREAD is the user-supplied bound on the length of the source sequence
3115 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3116 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3117 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3118 expression EXP is a string function call (as opposed to a memory call
3119 like memcpy). As an exception, SRCSTR can also be an integer denoting
3120 the precomputed size of the source string or object (for functions like
3121 memcpy).
3122 DSTSIZE is the size of the destination object specified by the last
3123 argument to the _chk builtins, typically resulting from the expansion
3124 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3125 DSTSIZE).
3127 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3128 SIZE_MAX.
3130 If the call is successfully verified as safe return true, otherwise
3131 return false. */
3133 static bool
3134 check_access (tree exp, tree, tree, tree dstwrite,
3135 tree maxread, tree srcstr, tree dstsize)
3137 int opt = OPT_Wstringop_overflow_;
3139 /* The size of the largest object is half the address space, or
3140 PTRDIFF_MAX. (This is way too permissive.) */
3141 tree maxobjsize = max_object_size ();
3143 /* Either the length of the source string for string functions or
3144 the size of the source object for raw memory functions. */
3145 tree slen = NULL_TREE;
3147 tree range[2] = { NULL_TREE, NULL_TREE };
3149 /* Set to true when the exact number of bytes written by a string
3150 function like strcpy is not known and the only thing that is
3151 known is that it must be at least one (for the terminating nul). */
3152 bool at_least_one = false;
3153 if (srcstr)
3155 /* SRCSTR is normally a pointer to string but as a special case
3156 it can be an integer denoting the length of a string. */
3157 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3159 /* Try to determine the range of lengths the source string
3160 refers to. If it can be determined and is less than
3161 the upper bound given by MAXREAD add one to it for
3162 the terminating nul. Otherwise, set it to one for
3163 the same reason, or to MAXREAD as appropriate. */
3164 get_range_strlen (srcstr, range);
3165 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3167 if (maxread && tree_int_cst_le (maxread, range[0]))
3168 range[0] = range[1] = maxread;
3169 else
3170 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3171 range[0], size_one_node);
3173 if (maxread && tree_int_cst_le (maxread, range[1]))
3174 range[1] = maxread;
3175 else if (!integer_all_onesp (range[1]))
3176 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3177 range[1], size_one_node);
3179 slen = range[0];
3181 else
3183 at_least_one = true;
3184 slen = size_one_node;
3187 else
3188 slen = srcstr;
3191 if (!dstwrite && !maxread)
3193 /* When the only available piece of data is the object size
3194 there is nothing to do. */
3195 if (!slen)
3196 return true;
3198 /* Otherwise, when the length of the source sequence is known
3199 (as with strlen), set DSTWRITE to it. */
3200 if (!range[0])
3201 dstwrite = slen;
3204 if (!dstsize)
3205 dstsize = maxobjsize;
3207 if (dstwrite)
3208 get_size_range (dstwrite, range);
3210 tree func = get_callee_fndecl (exp);
3212 /* First check the number of bytes to be written against the maximum
3213 object size. */
3214 if (range[0]
3215 && TREE_CODE (range[0]) == INTEGER_CST
3216 && tree_int_cst_lt (maxobjsize, range[0]))
3218 if (TREE_NO_WARNING (exp))
3219 return false;
3221 location_t loc = tree_nonartificial_location (exp);
3222 loc = expansion_point_location_if_in_system_header (loc);
3224 bool warned;
3225 if (range[0] == range[1])
3226 warned = warning_at (loc, opt,
3227 "%K%qD specified size %E "
3228 "exceeds maximum object size %E",
3229 exp, func, range[0], maxobjsize);
3230 else
3231 warned = warning_at (loc, opt,
3232 "%K%qD specified size between %E and %E "
3233 "exceeds maximum object size %E",
3234 exp, func,
3235 range[0], range[1], maxobjsize);
3236 if (warned)
3237 TREE_NO_WARNING (exp) = true;
3239 return false;
3242 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3243 constant, and in range of unsigned HOST_WIDE_INT. */
3244 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3246 /* Next check the number of bytes to be written against the destination
3247 object size. */
3248 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3250 if (range[0]
3251 && TREE_CODE (range[0]) == INTEGER_CST
3252 && ((tree_fits_uhwi_p (dstsize)
3253 && tree_int_cst_lt (dstsize, range[0]))
3254 || (dstwrite
3255 && tree_fits_uhwi_p (dstwrite)
3256 && tree_int_cst_lt (dstwrite, range[0]))))
3258 if (TREE_NO_WARNING (exp))
3259 return false;
3261 location_t loc = tree_nonartificial_location (exp);
3262 loc = expansion_point_location_if_in_system_header (loc);
3264 if (dstwrite == slen && at_least_one)
3266 /* This is a call to strcpy with a destination of 0 size
3267 and a source of unknown length. The call will write
3268 at least one byte past the end of the destination. */
3269 warning_at (loc, opt,
3270 "%K%qD writing %E or more bytes into a region "
3271 "of size %E overflows the destination",
3272 exp, func, range[0], dstsize);
3274 else if (tree_int_cst_equal (range[0], range[1]))
3275 warning_n (loc, opt, tree_to_uhwi (range[0]),
3276 "%K%qD writing %E byte into a region "
3277 "of size %E overflows the destination",
3278 "%K%qD writing %E bytes into a region "
3279 "of size %E overflows the destination",
3280 exp, func, range[0], dstsize);
3281 else if (tree_int_cst_sign_bit (range[1]))
3283 /* Avoid printing the upper bound if it's invalid. */
3284 warning_at (loc, opt,
3285 "%K%qD writing %E or more bytes into a region "
3286 "of size %E overflows the destination",
3287 exp, func, range[0], dstsize);
3289 else
3290 warning_at (loc, opt,
3291 "%K%qD writing between %E and %E bytes into "
3292 "a region of size %E overflows the destination",
3293 exp, func, range[0], range[1],
3294 dstsize);
3296 /* Return error when an overflow has been detected. */
3297 return false;
3301 /* Check the maximum length of the source sequence against the size
3302 of the destination object if known, or against the maximum size
3303 of an object. */
3304 if (maxread)
3306 get_size_range (maxread, range);
3308 /* Use the lower end for MAXREAD from now on. */
3309 if (range[0])
3310 maxread = range[0];
3312 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3314 location_t loc = tree_nonartificial_location (exp);
3315 loc = expansion_point_location_if_in_system_header (loc);
3317 if (tree_int_cst_lt (maxobjsize, range[0]))
3319 if (TREE_NO_WARNING (exp))
3320 return false;
3322 /* Warn about crazy big sizes first since that's more
3323 likely to be meaningful than saying that the bound
3324 is greater than the object size if both are big. */
3325 if (range[0] == range[1])
3326 warning_at (loc, opt,
3327 "%K%qD specified bound %E "
3328 "exceeds maximum object size %E",
3329 exp, func,
3330 range[0], maxobjsize);
3331 else
3332 warning_at (loc, opt,
3333 "%K%qD specified bound between %E and %E "
3334 "exceeds maximum object size %E",
3335 exp, func,
3336 range[0], range[1], maxobjsize);
3338 return false;
3341 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3343 if (TREE_NO_WARNING (exp))
3344 return false;
3346 if (tree_int_cst_equal (range[0], range[1]))
3347 warning_at (loc, opt,
3348 "%K%qD specified bound %E "
3349 "exceeds destination size %E",
3350 exp, func,
3351 range[0], dstsize);
3352 else
3353 warning_at (loc, opt,
3354 "%K%qD specified bound between %E and %E "
3355 "exceeds destination size %E",
3356 exp, func,
3357 range[0], range[1], dstsize);
3358 return false;
3363 /* Check for reading past the end of SRC. */
3364 if (slen
3365 && slen == srcstr
3366 && dstwrite && range[0]
3367 && tree_int_cst_lt (slen, range[0]))
3369 if (TREE_NO_WARNING (exp))
3370 return false;
3372 location_t loc = tree_nonartificial_location (exp);
3374 if (tree_int_cst_equal (range[0], range[1]))
3375 warning_n (loc, opt, tree_to_uhwi (range[0]),
3376 "%K%qD reading %E byte from a region of size %E",
3377 "%K%qD reading %E bytes from a region of size %E",
3378 exp, func, range[0], slen);
3379 else if (tree_int_cst_sign_bit (range[1]))
3381 /* Avoid printing the upper bound if it's invalid. */
3382 warning_at (loc, opt,
3383 "%K%qD reading %E or more bytes from a region "
3384 "of size %E",
3385 exp, func, range[0], slen);
3387 else
3388 warning_at (loc, opt,
3389 "%K%qD reading between %E and %E bytes from a region "
3390 "of size %E",
3391 exp, func, range[0], range[1], slen);
3392 return false;
3395 return true;
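/* Illustrative example: one of the overflow diagnostics issued above:

     char d[3];
     strcpy (d, "abcd");   // writes 5 bytes (including the nul)

   Here range[0] == range[1] == 5 exceeds DSTSIZE == 3, so check_access
   warns "'strcpy' writing 5 bytes into a region of size 3 overflows
   the destination" and returns false.  */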
3398 /* Helper to compute the size of the object referenced by the DEST
3399 expression which must have pointer type, using Object Size type
3400 OSTYPE (only the least significant 2 bits are used). Return
3401 an estimate of the size of the object if successful or NULL when
3402 the size cannot be determined. When the referenced object involves
3403 a non-constant offset in some range the returned value represents
3404 the largest size given the smallest non-negative offset in the
3405 range. The function is intended for diagnostics and should not
3406 be used to influence code generation or optimization. */
3408 tree
3409 compute_objsize (tree dest, int ostype)
3411 unsigned HOST_WIDE_INT size;
3413 /* Only the two least significant bits are meaningful. */
3414 ostype &= 3;
3416 if (compute_builtin_object_size (dest, ostype, &size))
3417 return build_int_cst (sizetype, size);
3419 if (TREE_CODE (dest) == SSA_NAME)
3421 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3422 if (!is_gimple_assign (stmt))
3423 return NULL_TREE;
3425 dest = gimple_assign_rhs1 (stmt);
3427 tree_code code = gimple_assign_rhs_code (stmt);
3428 if (code == POINTER_PLUS_EXPR)
3430 /* compute_builtin_object_size fails for addresses with
3431 non-constant offsets. Try to determine the range of
3432 such an offset here and use it to adjust the constant
3433 size. */
3434 tree off = gimple_assign_rhs2 (stmt);
3435 if (TREE_CODE (off) == INTEGER_CST)
3437 if (tree size = compute_objsize (dest, ostype))
3439 wide_int wioff = wi::to_wide (off);
3440 wide_int wisiz = wi::to_wide (size);
3442 /* Ignore negative offsets for now. For others,
3443 use the lower bound as the most optimistic
3444 estimate of the (remaining) size. */
3445 if (wi::sign_mask (wioff))
3447 else if (wi::ltu_p (wioff, wisiz))
3448 return wide_int_to_tree (TREE_TYPE (size),
3449 wi::sub (wisiz, wioff));
3450 else
3451 return size_zero_node;
3454 else if (TREE_CODE (off) == SSA_NAME
3455 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3457 wide_int min, max;
3458 enum value_range_type rng = get_range_info (off, &min, &max);
3460 if (rng == VR_RANGE)
3462 if (tree size = compute_objsize (dest, ostype))
3464 wide_int wisiz = wi::to_wide (size);
3466 /* Ignore negative offsets for now. For others,
3467 use the lower bound as the most optimistic
3468 estimate of the (remaining) size. */
3469 if (wi::sign_mask (min))
3471 else if (wi::ltu_p (min, wisiz))
3472 return wide_int_to_tree (TREE_TYPE (size),
3473 wi::sub (wisiz, min));
3474 else
3475 return size_zero_node;
3480 else if (code != ADDR_EXPR)
3481 return NULL_TREE;
3484 /* Unless computing the largest size (for memcpy and other raw memory
3485 functions), try to determine the size of the object from its type. */
3486 if (!ostype)
3487 return NULL_TREE;
3489 if (TREE_CODE (dest) != ADDR_EXPR)
3490 return NULL_TREE;
3492 tree type = TREE_TYPE (dest);
3493 if (TREE_CODE (type) == POINTER_TYPE)
3494 type = TREE_TYPE (type);
3496 type = TYPE_MAIN_VARIANT (type);
3498 if (TREE_CODE (type) == ARRAY_TYPE
3499 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3501 /* Return the constant size unless it's zero (that's a zero-length
3502 array likely at the end of a struct). */
3503 tree size = TYPE_SIZE_UNIT (type);
3504 if (size && TREE_CODE (size) == INTEGER_CST
3505 && !integer_zerop (size))
3506 return size;
3509 return NULL_TREE;
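/* An editorial sketch of the POINTER_PLUS handling above (not from
   the original source): with

     char a[8];
     char *p = a + i;        // i an SSA name known to be in [2, 6]

   compute_builtin_object_size fails because the offset is not
   constant, but get_range_info yields the range [2, 6], so the
   smallest non-negative offset gives the optimistic estimate
   8 - 2 = 6.  */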
3512 /* Helper to determine and check the sizes of the source and the destination
3513 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3514 call expression, DEST is the destination argument, SRC is the source
3515 argument or null, and LEN is the number of bytes. Use Object Size type-0
3516 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3517 (no overflow or invalid sizes), false otherwise. */
3519 static bool
3520 check_memop_access (tree exp, tree dest, tree src, tree size)
3522 /* For functions like memset and memcpy that operate on raw memory
3523 try to determine the size of the largest source and destination
3524 object using type-0 Object Size regardless of the object size
3525 type specified by the option. */
3526 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3527 tree dstsize = compute_objsize (dest, 0);
3529 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3530 srcsize, dstsize);
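/* For instance (illustrative only):

     char d[4];
     memcpy (d, s, 8);

   here compute_objsize (d, 0) yields 4, and check_access diagnoses
   the call as writing 8 bytes into a region of size 4 under
   -Wstringop-overflow.  */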
3533 /* Validate memchr arguments without performing any expansion.
3534 Return NULL_RTX. */
3536 static rtx
3537 expand_builtin_memchr (tree exp, rtx)
3539 if (!validate_arglist (exp,
3540 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3541 return NULL_RTX;
3543 tree arg1 = CALL_EXPR_ARG (exp, 0);
3544 tree len = CALL_EXPR_ARG (exp, 2);
3546 /* Diagnose calls where the specified length exceeds the size
3547 of the object. */
3548 if (warn_stringop_overflow)
3550 tree size = compute_objsize (arg1, 0);
3551 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3552 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3555 return NULL_RTX;
3558 /* Expand a call EXP to the memcpy builtin.
3559 Return NULL_RTX if we failed; the caller should emit a normal call,
3560 otherwise try to get the result in TARGET, if convenient (and in
3561 mode MODE if that's convenient). */
3563 static rtx
3564 expand_builtin_memcpy (tree exp, rtx target)
3566 if (!validate_arglist (exp,
3567 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3568 return NULL_RTX;
3570 tree dest = CALL_EXPR_ARG (exp, 0);
3571 tree src = CALL_EXPR_ARG (exp, 1);
3572 tree len = CALL_EXPR_ARG (exp, 2);
3574 check_memop_access (exp, dest, src, len);
3576 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3577 /*endp=*/ 0);
3580 /* Check a call EXP to the memmove built-in for validity.
3581 Return NULL_RTX on both success and failure. */
3583 static rtx
3584 expand_builtin_memmove (tree exp, rtx)
3586 if (!validate_arglist (exp,
3587 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3588 return NULL_RTX;
3590 tree dest = CALL_EXPR_ARG (exp, 0);
3591 tree src = CALL_EXPR_ARG (exp, 1);
3592 tree len = CALL_EXPR_ARG (exp, 2);
3594 check_memop_access (exp, dest, src, len);
3596 return NULL_RTX;
3599 /* Expand a call EXP to the mempcpy builtin.
3600 Return NULL_RTX if we failed; the caller should emit a normal call,
3601 otherwise try to get the result in TARGET, if convenient (and in
3602 mode MODE if that's convenient). If ENDP is 0 return the
3603 destination pointer, if ENDP is 1 return the end pointer ala
3604 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3605 stpcpy. */
3607 static rtx
3608 expand_builtin_mempcpy (tree exp, rtx target)
3610 if (!validate_arglist (exp,
3611 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3612 return NULL_RTX;
3614 tree dest = CALL_EXPR_ARG (exp, 0);
3615 tree src = CALL_EXPR_ARG (exp, 1);
3616 tree len = CALL_EXPR_ARG (exp, 2);
3618 /* Policy does not generally allow using compute_objsize (which
3619 is used internally by check_memop_access) to change code generation
3620 or drive optimization decisions.
3622 In this instance it is safe because the code we generate has
3623 the same semantics regardless of the return value of
3624 check_memop_access. Exactly the same amount of data is copied
3625 and the return value is exactly the same in both cases.
3627 Furthermore, check_memop_access always uses mode 0 for the call to
3628 compute_objsize, so the imprecise nature of compute_objsize is
3629 avoided. */
3631 /* Avoid expanding mempcpy into memcpy when the call is determined
3632 to overflow the buffer. This also prevents the same overflow
3633 from being diagnosed again when expanding memcpy. */
3634 if (!check_memop_access (exp, dest, src, len))
3635 return NULL_RTX;
3637 return expand_builtin_mempcpy_args (dest, src, len,
3638 target, exp, /*endp=*/ 1);
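/* A sketch of the ENDP convention used throughout (editorial
   example): after copying 5 bytes to D,

     endp == 0   returns D        (memcpy)
     endp == 1   returns D + 5    (mempcpy)
     endp == 2   returns D + 4    (stpcpy, the terminating nul)  */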
3641 /* Helper function to do the actual work of expanding the memory copy
3642 family of functions (memcpy, mempcpy, stpcpy). Expansion should copy
3643 LEN bytes of memory from SRC to DEST and assign the result to TARGET
3644 If ENDP is 0 return the
3645 destination pointer, if ENDP is 1 return the end pointer ala
3646 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3647 stpcpy. */
3649 static rtx
3650 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3651 rtx target, tree exp, int endp)
3653 const char *src_str;
3654 unsigned int src_align = get_pointer_alignment (src);
3655 unsigned int dest_align = get_pointer_alignment (dest);
3656 rtx dest_mem, src_mem, dest_addr, len_rtx;
3657 HOST_WIDE_INT expected_size = -1;
3658 unsigned int expected_align = 0;
3659 unsigned HOST_WIDE_INT min_size;
3660 unsigned HOST_WIDE_INT max_size;
3661 unsigned HOST_WIDE_INT probable_max_size;
3663 /* If DEST is not a pointer type, call the normal function. */
3664 if (dest_align == 0)
3665 return NULL_RTX;
3667 /* If SRC is not a pointer type, don't do this
3668 operation in-line. */
3669 if (src_align == 0)
3670 return NULL_RTX;
3672 if (currently_expanding_gimple_stmt)
3673 stringop_block_profile (currently_expanding_gimple_stmt,
3674 &expected_align, &expected_size);
3676 if (expected_align < dest_align)
3677 expected_align = dest_align;
3678 dest_mem = get_memory_rtx (dest, len);
3679 set_mem_align (dest_mem, dest_align);
3680 len_rtx = expand_normal (len);
3681 determine_block_size (len, len_rtx, &min_size, &max_size,
3682 &probable_max_size);
3683 src_str = c_getstr (src);
3685 /* If SRC is a string constant and block move would be done
3686 by pieces, we can avoid loading the string from memory
3687 and need only store the computed constants. */
3688 if (src_str
3689 && CONST_INT_P (len_rtx)
3690 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3691 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3692 CONST_CAST (char *, src_str),
3693 dest_align, false))
3695 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3696 builtin_memcpy_read_str,
3697 CONST_CAST (char *, src_str),
3698 dest_align, false, endp);
3699 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3700 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3701 return dest_mem;
3704 src_mem = get_memory_rtx (src, len);
3705 set_mem_align (src_mem, src_align);
3707 /* Copy word part most expediently. */
3708 enum block_op_methods method = BLOCK_OP_NORMAL;
3709 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3710 method = BLOCK_OP_TAILCALL;
3711 if (endp == 1 && target != const0_rtx)
3712 method = BLOCK_OP_NO_LIBCALL_RET;
3713 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3714 expected_align, expected_size,
3715 min_size, max_size, probable_max_size);
3716 if (dest_addr == pc_rtx)
3717 return NULL_RTX;
3719 if (dest_addr == 0)
3721 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3722 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3725 if (endp && target != const0_rtx)
3727 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3728 /* stpcpy returns a pointer to the last byte (the terminating nul). */
3729 if (endp == 2)
3730 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3733 return dest_addr;
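/* As an example of the string-constant fast path above (a sketch,
   not from the source):

     char d[4];
     memcpy (d, "abc", 4);

   c_getstr returns "abc", LEN_RTX is the constant 4, and if
   can_store_by_pieces agrees, the four bytes are emitted as
   immediate stores instead of being loaded from the string's
   memory.  */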
3736 static rtx
3737 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3738 rtx target, tree orig_exp, int endp)
3740 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3741 endp);
3744 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3745 we failed; the caller should emit a normal call, otherwise try to
3746 get the result in TARGET, if convenient. If ENDP is 0 return the
3747 destination pointer, if ENDP is 1 return the end pointer ala
3748 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3749 stpcpy. */
3751 static rtx
3752 expand_movstr (tree dest, tree src, rtx target, int endp)
3754 struct expand_operand ops[3];
3755 rtx dest_mem;
3756 rtx src_mem;
3758 if (!targetm.have_movstr ())
3759 return NULL_RTX;
3761 dest_mem = get_memory_rtx (dest, NULL);
3762 src_mem = get_memory_rtx (src, NULL);
3763 if (!endp)
3765 target = force_reg (Pmode, XEXP (dest_mem, 0));
3766 dest_mem = replace_equiv_address (dest_mem, target);
3769 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3770 create_fixed_operand (&ops[1], dest_mem);
3771 create_fixed_operand (&ops[2], src_mem);
3772 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3773 return NULL_RTX;
3775 if (endp && target != const0_rtx)
3777 target = ops[0].value;
3778 /* movstr is supposed to set end to the address of the NUL
3779 terminator. If the caller requested a mempcpy-like return value,
3780 adjust it. */
3781 if (endp == 1)
3783 rtx tem = plus_constant (GET_MODE (target),
3784 gen_lowpart (GET_MODE (target), target), 1);
3785 emit_move_insn (target, force_operand (tem, NULL_RTX));
3788 return target;
3791 /* Do some very basic size validation of a call to the strcat builtin
3792 given by EXP. Return NULL_RTX to have the built-in expand to a call
3793 to the library function. */
3795 static rtx
3796 expand_builtin_strcat (tree exp, rtx)
3798 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3799 || !warn_stringop_overflow)
3800 return NULL_RTX;
3802 tree dest = CALL_EXPR_ARG (exp, 0);
3803 tree src = CALL_EXPR_ARG (exp, 1);
3805 /* There is no way here to determine the length of the string in
3806 the destination to which the SRC string is being appended so
3807 just diagnose cases where the source string is longer than
3808 the destination object. */
3810 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3812 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3813 destsize);
3815 return NULL_RTX;
3818 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3819 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3820 try to get the result in TARGET, if convenient (and in mode MODE if that's
3821 convenient). */
3823 static rtx
3824 expand_builtin_strcpy (tree exp, rtx target)
3826 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3827 return NULL_RTX;
3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree src = CALL_EXPR_ARG (exp, 1);
3832 if (warn_stringop_overflow)
3834 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3835 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3836 src, destsize);
3839 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3841 /* Check to see if the argument was declared attribute nonstring
3842 and if so, issue a warning since at this point it's not known
3843 to be nul-terminated. */
3844 tree fndecl = get_callee_fndecl (exp);
3845 maybe_warn_nonstring_arg (fndecl, exp);
3846 return ret;
3849 return NULL_RTX;
3852 /* Helper function to do the actual work for expand_builtin_strcpy. The
3853 arguments to the builtin_strcpy call DEST and SRC are broken out
3854 so that this can also be called without constructing an actual CALL_EXPR.
3855 The other arguments and return value are the same as for
3856 expand_builtin_strcpy. */
3858 static rtx
3859 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3861 return expand_movstr (dest, src, target, /*endp=*/0);
3864 /* Expand a call EXP to the stpcpy builtin.
3865 Return NULL_RTX if we failed; the caller should emit a normal call,
3866 otherwise try to get the result in TARGET, if convenient (and in
3867 mode MODE if that's convenient). */
3869 static rtx
3870 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3872 tree dst, src;
3873 location_t loc = EXPR_LOCATION (exp);
3875 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3876 return NULL_RTX;
3878 dst = CALL_EXPR_ARG (exp, 0);
3879 src = CALL_EXPR_ARG (exp, 1);
3881 if (warn_stringop_overflow)
3883 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3884 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3885 src, destsize);
3888 /* If return value is ignored, transform stpcpy into strcpy. */
3889 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3891 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3892 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3893 return expand_expr (result, target, mode, EXPAND_NORMAL);
3895 else
3897 tree len, lenp1;
3898 rtx ret;
3900 /* Ensure we get an actual string whose length can be evaluated at
3901 compile-time, not an expression containing a string. This is
3902 because the latter will potentially produce pessimized code
3903 when used to produce the return value. */
3904 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3905 return expand_movstr (dst, src, target, /*endp=*/2);
3907 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3908 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3909 target, exp, /*endp=*/2);
3911 if (ret)
3912 return ret;
3914 if (TREE_CODE (len) == INTEGER_CST)
3916 rtx len_rtx = expand_normal (len);
3918 if (CONST_INT_P (len_rtx))
3920 ret = expand_builtin_strcpy_args (dst, src, target);
3922 if (ret)
3924 if (! target)
3926 if (mode != VOIDmode)
3927 target = gen_reg_rtx (mode);
3928 else
3929 target = gen_reg_rtx (GET_MODE (ret));
3931 if (GET_MODE (target) != GET_MODE (ret))
3932 ret = gen_lowpart (GET_MODE (target), ret);
3934 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3935 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3936 gcc_assert (ret);
3938 return target;
3943 return expand_movstr (dst, src, target, /*endp=*/2);
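/* A sketch of the transformations above (illustrative):

     stpcpy (d, s);           // result ignored
       => expanded as strcpy (d, s)

     p = stpcpy (d, "hi");    // c_strlen (src) == 2
       => copies 3 bytes as mempcpy would, but returns
          d + 2, a pointer to the terminating nul.  */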
3947 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3948 arguments while being careful to avoid duplicate warnings (which could
3949 be issued if the expander were to expand the call, resulting in it
3950 being emitted in expand_call ()). */
3952 static rtx
3953 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3955 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3957 /* The call has been successfully expanded. Check for nonstring
3958 arguments and issue warnings as appropriate. */
3959 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3960 return ret;
3963 return NULL_RTX;
3966 /* Check a call EXP to the stpncpy built-in for validity.
3967 Return NULL_RTX on both success and failure. */
3969 static rtx
3970 expand_builtin_stpncpy (tree exp, rtx)
3972 if (!validate_arglist (exp,
3973 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3974 || !warn_stringop_overflow)
3975 return NULL_RTX;
3977 /* The source and destination of the call. */
3978 tree dest = CALL_EXPR_ARG (exp, 0);
3979 tree src = CALL_EXPR_ARG (exp, 1);
3981 /* The exact number of bytes to write (not the maximum). */
3982 tree len = CALL_EXPR_ARG (exp, 2);
3984 /* The size of the destination object. */
3985 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3987 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3989 return NULL_RTX;
3992 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3993 bytes from constant string DATA + OFFSET and return it as target
3994 constant. */
3996 rtx
3997 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3998 scalar_int_mode mode)
4000 const char *str = (const char *) data;
4002 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4003 return const0_rtx;
4005 return c_readstr (str + offset, mode);
4008 /* Helper to check the sizes of sequences and the destination of calls
4009 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4010 success (no overflow or invalid sizes), false otherwise. */
4012 static bool
4013 check_strncat_sizes (tree exp, tree objsize)
4015 tree dest = CALL_EXPR_ARG (exp, 0);
4016 tree src = CALL_EXPR_ARG (exp, 1);
4017 tree maxread = CALL_EXPR_ARG (exp, 2);
4019 /* Try to determine the range of lengths that the source expression
4020 refers to. */
4021 tree lenrange[2];
4022 get_range_strlen (src, lenrange);
4024 /* Try to verify that the destination is big enough for the shortest
4025 string. */
4027 if (!objsize && warn_stringop_overflow)
4029 /* If it hasn't been provided by __strncat_chk, try to determine
4030 the size of the destination object into which the source is
4031 being copied. */
4032 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4035 /* Add one for the terminating nul. */
4036 tree srclen = (lenrange[0]
4037 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4038 size_one_node)
4039 : NULL_TREE);
4041 /* The strncat function copies at most MAXREAD bytes and always appends
4042 the terminating nul so the specified upper bound should never be equal
4043 to (or greater than) the size of the destination. */
4044 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4045 && tree_int_cst_equal (objsize, maxread))
4047 location_t loc = tree_nonartificial_location (exp);
4048 loc = expansion_point_location_if_in_system_header (loc);
4050 warning_at (loc, OPT_Wstringop_overflow_,
4051 "%K%qD specified bound %E equals destination size",
4052 exp, get_callee_fndecl (exp), maxread);
4054 return false;
4057 if (!srclen
4058 || (maxread && tree_fits_uhwi_p (maxread)
4059 && tree_fits_uhwi_p (srclen)
4060 && tree_int_cst_lt (maxread, srclen)))
4061 srclen = maxread;
4063 /* The number of bytes to write is LEN but check_access will also
4064 check SRCLEN if LEN's value isn't known. */
4065 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4066 objsize);
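/* For example (editorial illustration):

     char d[8];
     strncat (d, s, sizeof d);

   is diagnosed here because strncat always appends a terminating
   nul, so a bound equal to the destination size can overflow it
   by one byte.  */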
4069 /* Similar to expand_builtin_strcat, do some very basic size validation
4070 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4071 the built-in expand to a call to the library function. */
4073 static rtx
4074 expand_builtin_strncat (tree exp, rtx)
4076 if (!validate_arglist (exp,
4077 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4078 || !warn_stringop_overflow)
4079 return NULL_RTX;
4081 tree dest = CALL_EXPR_ARG (exp, 0);
4082 tree src = CALL_EXPR_ARG (exp, 1);
4083 /* The upper bound on the number of bytes to write. */
4084 tree maxread = CALL_EXPR_ARG (exp, 2);
4085 /* The length of the source sequence. */
4086 tree slen = c_strlen (src, 1);
4088 /* Try to determine the range of lengths that the source expression
4089 refers to. */
4090 tree lenrange[2];
4091 if (slen)
4092 lenrange[0] = lenrange[1] = slen;
4093 else
4094 get_range_strlen (src, lenrange);
4096 /* Try to verify that the destination is big enough for the shortest
4097 string. First try to determine the size of the destination object
4098 into which the source is being copied. */
4099 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4101 /* Add one for the terminating nul. */
4102 tree srclen = (lenrange[0]
4103 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4104 size_one_node)
4105 : NULL_TREE);
4107 /* The strncat function copies at most MAXREAD bytes and always appends
4108 the terminating nul so the specified upper bound should never be equal
4109 to (or greater than) the size of the destination. */
4110 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4111 && tree_int_cst_equal (destsize, maxread))
4113 location_t loc = tree_nonartificial_location (exp);
4114 loc = expansion_point_location_if_in_system_header (loc);
4116 warning_at (loc, OPT_Wstringop_overflow_,
4117 "%K%qD specified bound %E equals destination size",
4118 exp, get_callee_fndecl (exp), maxread);
4120 return NULL_RTX;
4123 if (!srclen
4124 || (maxread && tree_fits_uhwi_p (maxread)
4125 && tree_fits_uhwi_p (srclen)
4126 && tree_int_cst_lt (maxread, srclen)))
4127 srclen = maxread;
4129 /* The number of bytes to write is SRCLEN. */
4130 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4132 return NULL_RTX;
4135 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4136 NULL_RTX if we failed; the caller should emit a normal call. */
4138 static rtx
4139 expand_builtin_strncpy (tree exp, rtx target)
4141 location_t loc = EXPR_LOCATION (exp);
4143 if (validate_arglist (exp,
4144 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4146 tree dest = CALL_EXPR_ARG (exp, 0);
4147 tree src = CALL_EXPR_ARG (exp, 1);
4148 /* The number of bytes to write (not the maximum). */
4149 tree len = CALL_EXPR_ARG (exp, 2);
4150 /* The length of the source sequence. */
4151 tree slen = c_strlen (src, 1);
4153 if (warn_stringop_overflow)
4155 tree destsize = compute_objsize (dest,
4156 warn_stringop_overflow - 1);
4158 /* The number of bytes to write is LEN but check_access will also
4159 check SLEN if LEN's value isn't known. */
4160 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4161 destsize);
4164 /* We must be passed a constant len and src parameter. */
4165 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4166 return NULL_RTX;
4168 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4170 /* We're required to pad with trailing zeros if the requested
4171 len is greater than strlen(s2)+1. In that case try to
4172 use store_by_pieces; if that fails, punt. */
4173 if (tree_int_cst_lt (slen, len))
4175 unsigned int dest_align = get_pointer_alignment (dest);
4176 const char *p = c_getstr (src);
4177 rtx dest_mem;
4179 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4180 || !can_store_by_pieces (tree_to_uhwi (len),
4181 builtin_strncpy_read_str,
4182 CONST_CAST (char *, p),
4183 dest_align, false))
4184 return NULL_RTX;
4186 dest_mem = get_memory_rtx (dest, len);
4187 store_by_pieces (dest_mem, tree_to_uhwi (len),
4188 builtin_strncpy_read_str,
4189 CONST_CAST (char *, p), dest_align, false, 0);
4190 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4191 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4192 return dest_mem;
4195 return NULL_RTX;
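/* Padding example (a sketch, not from the source):

     char d[5];
     strncpy (d, "ab", 5);

   SLEN + 1 == 3 is less than LEN == 5, so the expansion must also
   store the trailing zeros; builtin_strncpy_read_str returns zero
   bytes past the end of "ab" and store_by_pieces writes
   'a','b','\0','\0','\0' directly.  */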
4198 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4199 bytes from constant string DATA + OFFSET and return it as target
4200 constant. */
4202 rtx
4203 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4204 scalar_int_mode mode)
4206 const char *c = (const char *) data;
4207 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4209 memset (p, *c, GET_MODE_SIZE (mode));
4211 return c_readstr (p, mode);
4214 /* Callback routine for store_by_pieces. Return the RTL of a register
4215 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4216 char value given in the RTL register data. For example, if mode is
4217 4 bytes wide, return the RTL for 0x01010101*data. */
4219 static rtx
4220 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4221 scalar_int_mode mode)
4223 rtx target, coeff;
4224 size_t size;
4225 char *p;
4227 size = GET_MODE_SIZE (mode);
4228 if (size == 1)
4229 return (rtx) data;
4231 p = XALLOCAVEC (char, size);
4232 memset (p, 1, size);
4233 coeff = c_readstr (p, mode);
4235 target = convert_to_mode (mode, (rtx) data, 1);
4236 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4237 return force_reg (mode, target);
4240 /* Expand expression EXP, which is a call to the memset builtin. Return
4241 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4242 try to get the result in TARGET, if convenient (and in mode MODE if that's
4243 convenient). */
4245 static rtx
4246 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4248 if (!validate_arglist (exp,
4249 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4250 return NULL_RTX;
4252 tree dest = CALL_EXPR_ARG (exp, 0);
4253 tree val = CALL_EXPR_ARG (exp, 1);
4254 tree len = CALL_EXPR_ARG (exp, 2);
4256 check_memop_access (exp, dest, NULL_TREE, len);
4258 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4261 /* Helper function to do the actual work for expand_builtin_memset. The
4262 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4263 so that this can also be called without constructing an actual CALL_EXPR.
4264 The other arguments and return value are the same as for
4265 expand_builtin_memset. */
4267 static rtx
4268 expand_builtin_memset_args (tree dest, tree val, tree len,
4269 rtx target, machine_mode mode, tree orig_exp)
4271 tree fndecl, fn;
4272 enum built_in_function fcode;
4273 machine_mode val_mode;
4274 char c;
4275 unsigned int dest_align;
4276 rtx dest_mem, dest_addr, len_rtx;
4277 HOST_WIDE_INT expected_size = -1;
4278 unsigned int expected_align = 0;
4279 unsigned HOST_WIDE_INT min_size;
4280 unsigned HOST_WIDE_INT max_size;
4281 unsigned HOST_WIDE_INT probable_max_size;
4283 dest_align = get_pointer_alignment (dest);
4285 /* If DEST is not a pointer type, don't do this operation in-line. */
4286 if (dest_align == 0)
4287 return NULL_RTX;
4289 if (currently_expanding_gimple_stmt)
4290 stringop_block_profile (currently_expanding_gimple_stmt,
4291 &expected_align, &expected_size);
4293 if (expected_align < dest_align)
4294 expected_align = dest_align;
4296 /* If the LEN parameter is zero, return DEST. */
4297 if (integer_zerop (len))
4299 /* Evaluate and ignore VAL in case it has side-effects. */
4300 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4301 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4304 /* Stabilize the arguments in case we fail. */
4305 dest = builtin_save_expr (dest);
4306 val = builtin_save_expr (val);
4307 len = builtin_save_expr (len);
4309 len_rtx = expand_normal (len);
4310 determine_block_size (len, len_rtx, &min_size, &max_size,
4311 &probable_max_size);
4312 dest_mem = get_memory_rtx (dest, len);
4313 val_mode = TYPE_MODE (unsigned_char_type_node);
4315 if (TREE_CODE (val) != INTEGER_CST)
4317 rtx val_rtx;
4319 val_rtx = expand_normal (val);
4320 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4322 /* Assume that we can memset by pieces if we can store
4323 the coefficients by pieces (in the required modes).
4324 We can't pass builtin_memset_gen_str as that emits RTL. */
4325 c = 1;
4326 if (tree_fits_uhwi_p (len)
4327 && can_store_by_pieces (tree_to_uhwi (len),
4328 builtin_memset_read_str, &c, dest_align,
4329 true))
4331 val_rtx = force_reg (val_mode, val_rtx);
4332 store_by_pieces (dest_mem, tree_to_uhwi (len),
4333 builtin_memset_gen_str, val_rtx, dest_align,
4334 true, 0);
4336 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4337 dest_align, expected_align,
4338 expected_size, min_size, max_size,
4339 probable_max_size))
4340 goto do_libcall;
4342 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4343 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4344 return dest_mem;
4347 if (target_char_cast (val, &c))
4348 goto do_libcall;
4350 if (c)
4352 if (tree_fits_uhwi_p (len)
4353 && can_store_by_pieces (tree_to_uhwi (len),
4354 builtin_memset_read_str, &c, dest_align,
4355 true))
4356 store_by_pieces (dest_mem, tree_to_uhwi (len),
4357 builtin_memset_read_str, &c, dest_align, true, 0);
4358 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4359 gen_int_mode (c, val_mode),
4360 dest_align, expected_align,
4361 expected_size, min_size, max_size,
4362 probable_max_size))
4363 goto do_libcall;
4365 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4366 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4367 return dest_mem;
4370 set_mem_align (dest_mem, dest_align);
4371 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4372 CALL_EXPR_TAILCALL (orig_exp)
4373 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4374 expected_align, expected_size,
4375 min_size, max_size,
4376 probable_max_size);
4378 if (dest_addr == 0)
4380 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4381 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4384 return dest_addr;
4386 do_libcall:
4387 fndecl = get_callee_fndecl (orig_exp);
4388 fcode = DECL_FUNCTION_CODE (fndecl);
4389 if (fcode == BUILT_IN_MEMSET)
4390 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4391 dest, val, len);
4392 else if (fcode == BUILT_IN_BZERO)
4393 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4394 dest, len);
4395 else
4396 gcc_unreachable ();
4397 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4398 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4399 return expand_call (fn, target, target == const0_rtx);
4402 /* Expand expression EXP, which is a call to the bzero builtin. Return
4403 NULL_RTX if we failed; the caller should emit a normal call. */
4405 static rtx
4406 expand_builtin_bzero (tree exp)
4408 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4409 return NULL_RTX;
4411 tree dest = CALL_EXPR_ARG (exp, 0);
4412 tree size = CALL_EXPR_ARG (exp, 1);
4414 check_memop_access (exp, dest, NULL_TREE, size);
4416 /* New argument list transforming bzero(ptr x, int y) to
4417 memset(ptr x, int 0, size_t y). This is done this way
4418 so that if it isn't expanded inline, we fall back to
4419 calling bzero instead of memset. */
4421 location_t loc = EXPR_LOCATION (exp);
4423 return expand_builtin_memset_args (dest, integer_zero_node,
4424 fold_convert_loc (loc,
4425 size_type_node, size),
4426 const0_rtx, VOIDmode, exp);
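/* Illustrative example: the call

     bzero (p, n);

   is expanded here exactly as

     memset (p, 0, (size_t) n);

   would be, but if inline expansion fails, the do_libcall path in
   expand_builtin_memset_args re-emits a call to bzero itself
   rather than to memset.  */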
4429 /* Try to expand cmpstr operation ICODE with the given operands.
4430 Return the result rtx on success, otherwise return null. */
4432 static rtx
4433 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4434 HOST_WIDE_INT align)
4436 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4438 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4439 target = NULL_RTX;
4441 struct expand_operand ops[4];
4442 create_output_operand (&ops[0], target, insn_mode);
4443 create_fixed_operand (&ops[1], arg1_rtx);
4444 create_fixed_operand (&ops[2], arg2_rtx);
4445 create_integer_operand (&ops[3], align);
4446 if (maybe_expand_insn (icode, 4, ops))
4447 return ops[0].value;
4448 return NULL_RTX;
4451 /* Expand expression EXP, which is a call to the memcmp built-in function.
4452 Return NULL_RTX if we failed and the caller should emit a normal call,
4453 otherwise try to get the result in TARGET, if convenient.
4454 RESULT_EQ is true if we can relax the returned value to be either zero
4455 or nonzero, without caring about the sign. */
4457 static rtx
4458 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4460 if (!validate_arglist (exp,
4461 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4462 return NULL_RTX;
4464 tree arg1 = CALL_EXPR_ARG (exp, 0);
4465 tree arg2 = CALL_EXPR_ARG (exp, 1);
4466 tree len = CALL_EXPR_ARG (exp, 2);
4467 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4468 bool no_overflow = true;
4470 /* Diagnose calls where the specified length exceeds the size of either
4471 object. */
4472 tree size = compute_objsize (arg1, 0);
4473 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4474 len, /*maxread=*/NULL_TREE, size,
4475 /*objsize=*/NULL_TREE);
4476 if (no_overflow)
4478 size = compute_objsize (arg2, 0);
4479 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4480 len, /*maxread=*/NULL_TREE, size,
4481 /*objsize=*/NULL_TREE);
4484 /* Due to the performance benefit, always inline the calls first
4485 when result_eq is false. */
4486 rtx result = NULL_RTX;
4488 if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
4490 result = inline_expand_builtin_string_cmp (exp, target);
4491 if (result)
4492 return result;
4495 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4496 location_t loc = EXPR_LOCATION (exp);
4498 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4499 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4501 /* If we don't have POINTER_TYPE, call the function. */
4502 if (arg1_align == 0 || arg2_align == 0)
4503 return NULL_RTX;
4505 rtx arg1_rtx = get_memory_rtx (arg1, len);
4506 rtx arg2_rtx = get_memory_rtx (arg2, len);
4507 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4509 /* Set MEM_SIZE as appropriate. */
4510 if (CONST_INT_P (len_rtx))
4512 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4513 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4516 by_pieces_constfn constfn = NULL;
4518 const char *src_str = c_getstr (arg2);
4519 if (result_eq && src_str == NULL)
4521 src_str = c_getstr (arg1);
4522 if (src_str != NULL)
4523 std::swap (arg1_rtx, arg2_rtx);
4526 /* If SRC is a string constant and block move would be done
4527 by pieces, we can avoid loading the string from memory
4528 and need only store the computed constants. */
4529 if (src_str
4530 && CONST_INT_P (len_rtx)
4531 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4532 constfn = builtin_memcpy_read_str;
4534 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4535 TREE_TYPE (len), target,
4536 result_eq, constfn,
4537 CONST_CAST (char *, src_str));
4539 if (result)
4541 /* Return the value in the proper mode for this function. */
4542 if (GET_MODE (result) == mode)
4543 return result;
4545 if (target != 0)
4547 convert_move (target, result, 0);
4548 return target;
4551 return convert_to_mode (mode, result, 0);
4554 return NULL_RTX;
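/* For example (a sketch): in

     if (memcmp (a, b, 8) == 0) ...

   RESULT_EQ is true, so emit_block_cmp_hints only needs a
   zero/nonzero result and may use a cheaper equality-only
   sequence (say, two word loads and an XOR) instead of an
   ordered three-way comparison.  */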
4557 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4558 if we failed; the caller should emit a normal call, otherwise try to get
4559 the result in TARGET, if convenient. */
4561 static rtx
4562 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4564 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4565 return NULL_RTX;
4567 /* Due to the performance benefit, always inline the calls first. */
4568 rtx result = NULL_RTX;
4569 result = inline_expand_builtin_string_cmp (exp, target);
4570 if (result)
4571 return result;
4573 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4574 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4575 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4576 return NULL_RTX;
4578 tree arg1 = CALL_EXPR_ARG (exp, 0);
4579 tree arg2 = CALL_EXPR_ARG (exp, 1);
4581 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4582 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4584 /* If we don't have POINTER_TYPE, call the function. */
4585 if (arg1_align == 0 || arg2_align == 0)
4586 return NULL_RTX;
4588 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4589 arg1 = builtin_save_expr (arg1);
4590 arg2 = builtin_save_expr (arg2);
4592 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4593 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4595 /* Try to call cmpstrsi. */
4596 if (cmpstr_icode != CODE_FOR_nothing)
4597 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4598 MIN (arg1_align, arg2_align));
4600 /* Try to determine at least one length and call cmpstrnsi. */
4601 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4603 tree len;
4604 rtx arg3_rtx;
4606 tree len1 = c_strlen (arg1, 1);
4607 tree len2 = c_strlen (arg2, 1);
4609 if (len1)
4610 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4611 if (len2)
4612 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4614 /* If we don't have a constant length for the first, use the length
4615 of the second, if we know it. We don't require a constant for
4616 this case; some cost analysis could be done if both are available
4617 but neither is constant. For now, assume they're equally cheap,
4618 unless one has side effects. If both strings have constant lengths,
4619 use the smaller. */
4621 if (!len1)
4622 len = len2;
4623 else if (!len2)
4624 len = len1;
4625 else if (TREE_SIDE_EFFECTS (len1))
4626 len = len2;
4627 else if (TREE_SIDE_EFFECTS (len2))
4628 len = len1;
4629 else if (TREE_CODE (len1) != INTEGER_CST)
4630 len = len2;
4631 else if (TREE_CODE (len2) != INTEGER_CST)
4632 len = len1;
4633 else if (tree_int_cst_lt (len1, len2))
4634 len = len1;
4635 else
4636 len = len2;
4638 /* If both arguments have side effects, we cannot optimize. */
4639 if (len && !TREE_SIDE_EFFECTS (len))
4641 arg3_rtx = expand_normal (len);
4642 result = expand_cmpstrn_or_cmpmem
4643 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4644 arg3_rtx, MIN (arg1_align, arg2_align));
4648 tree fndecl = get_callee_fndecl (exp);
4649 if (result)
4651 /* Check to see if the argument was declared attribute nonstring
4652 and if so, issue a warning since at this point it's not known
4653 to be nul-terminated. */
4654 maybe_warn_nonstring_arg (fndecl, exp);
4656 /* Return the value in the proper mode for this function. */
4657 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4658 if (GET_MODE (result) == mode)
4659 return result;
4660 if (target == 0)
4661 return convert_to_mode (mode, result, 0);
4662 convert_move (target, result, 0);
4663 return target;
4666 /* Expand the library call ourselves using a stabilized argument
4667 list to avoid evaluating the function's arguments twice. */
4668 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4669 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4670 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4671 return expand_call (fn, target, target == const0_rtx);
4674 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4675 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4676 the result in TARGET, if convenient. */
4678 static rtx
4679 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4680 ATTRIBUTE_UNUSED machine_mode mode)
4682 if (!validate_arglist (exp,
4683 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4684 return NULL_RTX;
4686 /* Due to the performance benefit, always inline the calls first. */
4687 rtx result = NULL_RTX;
4688 result = inline_expand_builtin_string_cmp (exp, target);
4689 if (result)
4690 return result;
4692 /* If c_strlen can determine an expression for one of the string
4693 lengths, and it doesn't have side effects, then emit cmpstrnsi
4694 using length MIN(strlen(string)+1, arg3). */
4695 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4696 if (cmpstrn_icode == CODE_FOR_nothing)
4697 return NULL_RTX;
4699 tree len;
4701 tree arg1 = CALL_EXPR_ARG (exp, 0);
4702 tree arg2 = CALL_EXPR_ARG (exp, 1);
4703 tree arg3 = CALL_EXPR_ARG (exp, 2);
4705 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4706 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4708 tree len1 = c_strlen (arg1, 1);
4709 tree len2 = c_strlen (arg2, 1);
4711 location_t loc = EXPR_LOCATION (exp);
4713 if (len1)
4714 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4715 if (len2)
4716 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4718 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4720 /* If we don't have a constant length for the first, use the length
4721 of the second, if we know it. If neither string is constant length,
4722 use the given length argument. We don't require a constant for
4723 this case; some cost analysis could be done if both are available
4724 but neither is constant. For now, assume they're equally cheap,
4725 unless one has side effects. If both strings have constant lengths,
4726 use the smaller. */
4728 if (!len1 && !len2)
4729 len = len3;
4730 else if (!len1)
4731 len = len2;
4732 else if (!len2)
4733 len = len1;
4734 else if (TREE_SIDE_EFFECTS (len1))
4735 len = len2;
4736 else if (TREE_SIDE_EFFECTS (len2))
4737 len = len1;
4738 else if (TREE_CODE (len1) != INTEGER_CST)
4739 len = len2;
4740 else if (TREE_CODE (len2) != INTEGER_CST)
4741 len = len1;
4742 else if (tree_int_cst_lt (len1, len2))
4743 len = len1;
4744 else
4745 len = len2;
4747 /* If we are not using the given length, we must incorporate it here.
4748 The actual new length parameter will be MIN(len,arg3) in this case. */
4749 if (len != len3)
4750 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4751 rtx arg1_rtx = get_memory_rtx (arg1, len);
4752 rtx arg2_rtx = get_memory_rtx (arg2, len);
4753 rtx arg3_rtx = expand_normal (len);
4754 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4755 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4756 MIN (arg1_align, arg2_align));
4758 tree fndecl = get_callee_fndecl (exp);
4759 if (result)
4761 /* Check to see if the argument was declared attribute nonstring
4762 and if so, issue a warning since at this point it's not known
4763 to be nul-terminated. */
4764 maybe_warn_nonstring_arg (fndecl, exp);
4766 /* Return the value in the proper mode for this function. */
4767 mode = TYPE_MODE (TREE_TYPE (exp));
4768 if (GET_MODE (result) == mode)
4769 return result;
4770 if (target == 0)
4771 return convert_to_mode (mode, result, 0);
4772 convert_move (target, result, 0);
4773 return target;
4776 /* Expand the library call ourselves using a stabilized argument
4777 list to avoid evaluating the function's arguments twice. */
4778 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4779 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4780 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4781 return expand_call (fn, target, target == const0_rtx);
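/* Length-selection sketch (editorial example): for

     strncmp (s, "abcd", n);

   LEN2 is 5 (strlen plus the nul), LEN1 is unknown, so LEN = 5;
   since LEN differs from LEN3 (= n), the length passed to the
   cmpstrn insn is MIN (5, n).  */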
4784 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4785 if that's convenient. */
4787 rtx
4788 expand_builtin_saveregs (void)
4790 rtx val;
4791 rtx_insn *seq;
4793 /* Don't do __builtin_saveregs more than once in a function.
4794 Save the result of the first call and reuse it. */
4795 if (saveregs_value != 0)
4796 return saveregs_value;
4798 /* When this function is called, it means that registers must be
4799 saved on entry to this function. So we migrate the call to the
4800 first insn of this function. */
4802 start_sequence ();
4804 /* Do whatever the machine needs done in this case. */
4805 val = targetm.calls.expand_builtin_saveregs ();
4807 seq = get_insns ();
4808 end_sequence ();
4810 saveregs_value = val;
4812 /* Put the insns after the NOTE that starts the function. If this
4813 is inside a start_sequence, make the outer-level insn chain current, so
4814 the code is placed at the start of the function. */
4815 push_topmost_sequence ();
4816 emit_insn_after (seq, entry_of_function ());
4817 pop_topmost_sequence ();
4819 return val;
4822 /* Expand a call to __builtin_next_arg. */
4824 static rtx
4825 expand_builtin_next_arg (void)
4827 /* Checking arguments is already done in fold_builtin_next_arg,
4828 which must be called before this function. */
4829 return expand_binop (ptr_mode, add_optab,
4830 crtl->args.internal_arg_pointer,
4831 crtl->args.arg_offset_rtx,
4832 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4835 /* Make it easier for the backends by protecting the valist argument
4836 from multiple evaluations. */
4838 static tree
4839 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4841 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4843 /* The current way of determining the type of valist is completely
4844 bogus. We should have the information on the va builtin instead. */
4845 if (!vatype)
4846 vatype = targetm.fn_abi_va_list (cfun->decl);
4848 if (TREE_CODE (vatype) == ARRAY_TYPE)
4850 if (TREE_SIDE_EFFECTS (valist))
4851 valist = save_expr (valist);
4853 /* For this case, the backends will be expecting a pointer to
4854 vatype, but it's possible we've actually been given an array
4855 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4856 So fix it. */
4857 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4859 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4860 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4863 else
4865 tree pt = build_pointer_type (vatype);
4867 if (! needs_lvalue)
4869 if (! TREE_SIDE_EFFECTS (valist))
4870 return valist;
4872 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4873 TREE_SIDE_EFFECTS (valist) = 1;
4876 if (TREE_SIDE_EFFECTS (valist))
4877 valist = save_expr (valist);
4878 valist = fold_build2_loc (loc, MEM_REF,
4879 vatype, valist, build_int_cst (pt, 0));
4882 return valist;
4885 /* The "standard" definition of va_list is void*. */
4887 tree
4888 std_build_builtin_va_list (void)
4890 return ptr_type_node;
4893 /* The "standard" abi va_list is va_list_type_node. */
4895 tree
4896 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4898 return va_list_type_node;
4901 /* The "standard" type of va_list is va_list_type_node. */
4903 tree
4904 std_canonical_va_list_type (tree type)
4906 tree wtype, htype;
4908 wtype = va_list_type_node;
4909 htype = type;
4911 if (TREE_CODE (wtype) == ARRAY_TYPE)
4913 /* If va_list is an array type, the argument may have decayed
4914 to a pointer type, e.g. by being passed to another function.
4915 In that case, unwrap both types so that we can compare the
4916 underlying records. */
4917 if (TREE_CODE (htype) == ARRAY_TYPE
4918 || POINTER_TYPE_P (htype))
4920 wtype = TREE_TYPE (wtype);
4921 htype = TREE_TYPE (htype);
4924 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4925 return va_list_type_node;
4927 return NULL_TREE;
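/* For example (a sketch): on targets where

     typedef struct __va_list_tag va_list[1];

   a va_list argument passed to another function decays to
   struct __va_list_tag *, so both WTYPE and HTYPE are unwrapped
   one level before their main variants are compared.  */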
4930 /* The "standard" implementation of va_start: just assign `nextarg' to
4931 the variable. */
4933 void
4934 std_expand_builtin_va_start (tree valist, rtx nextarg)
4936 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4937 convert_move (va_r, nextarg, 0);
4940 /* Expand EXP, a call to __builtin_va_start. */
4942 static rtx
4943 expand_builtin_va_start (tree exp)
4945 rtx nextarg;
4946 tree valist;
4947 location_t loc = EXPR_LOCATION (exp);
4949 if (call_expr_nargs (exp) < 2)
4951 error_at (loc, "too few arguments to function %<va_start%>");
4952 return const0_rtx;
4955 if (fold_builtin_next_arg (exp, true))
4956 return const0_rtx;
4958 nextarg = expand_builtin_next_arg ();
4959 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4961 if (targetm.expand_builtin_va_start)
4962 targetm.expand_builtin_va_start (valist, nextarg);
4963 else
4964 std_expand_builtin_va_start (valist, nextarg);
4966 return const0_rtx;
4969 /* Expand EXP, a call to __builtin_va_end. */
4971 static rtx
4972 expand_builtin_va_end (tree exp)
4974 tree valist = CALL_EXPR_ARG (exp, 0);
4976 /* Evaluate for side effects, if needed. I hate macros that don't
4977 do that. */
4978 if (TREE_SIDE_EFFECTS (valist))
4979 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4981 return const0_rtx;
4984 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4985 builtin rather than just as an assignment in stdarg.h because of the
4986 nastiness of array-type va_list types. */
4988 static rtx
4989 expand_builtin_va_copy (tree exp)
4991 tree dst, src, t;
4992 location_t loc = EXPR_LOCATION (exp);
4994 dst = CALL_EXPR_ARG (exp, 0);
4995 src = CALL_EXPR_ARG (exp, 1);
4997 dst = stabilize_va_list_loc (loc, dst, 1);
4998 src = stabilize_va_list_loc (loc, src, 0);
5000 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5002 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5004 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5005 TREE_SIDE_EFFECTS (t) = 1;
5006 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5008 else
5010 rtx dstb, srcb, size;
5012 /* Evaluate to pointers. */
5013 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5014 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5015 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5016 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5018 dstb = convert_memory_address (Pmode, dstb);
5019 srcb = convert_memory_address (Pmode, srcb);
5021 /* "Dereference" to BLKmode memories. */
5022 dstb = gen_rtx_MEM (BLKmode, dstb);
5023 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5024 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5025 srcb = gen_rtx_MEM (BLKmode, srcb);
5026 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5027 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5029 /* Copy. */
5030 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5033 return const0_rtx;
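/* For example (editorial sketch): with the "standard" void *
   va_list a plain MODIFY_EXPR assignment suffices, while on a
   target whose ABI va_list is an array type such as

     typedef struct __va_list_tag __builtin_va_list[1];

   the else branch above performs a BLKmode block copy of
   sizeof (struct __va_list_tag) bytes.  */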
5036 /* Expand a call to one of the builtin functions __builtin_frame_address or
5037 __builtin_return_address. */
5039 static rtx
5040 expand_builtin_frame_address (tree fndecl, tree exp)
5042 /* The argument must be a nonnegative integer constant.
5043 It counts the number of frames to scan up the stack.
5044 The value is either the frame pointer value or the return
5045 address saved in that frame. */
5046 if (call_expr_nargs (exp) == 0)
5047 /* Warning about missing arg was already issued. */
5048 return const0_rtx;
5049 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5051 error ("invalid argument to %qD", fndecl);
5052 return const0_rtx;
5054 else
5056 /* Number of frames to scan up the stack. */
5057 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5059 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5061 /* Some ports cannot access arbitrary stack frames. */
5062 if (tem == NULL)
5064 warning (0, "unsupported argument to %qD", fndecl);
5065 return const0_rtx;
5068 if (count)
5070 /* Warn since no effort is made to ensure that any frame
5071 beyond the current one exists or can be safely reached. */
5072 warning (OPT_Wframe_address, "calling %qD with "
5073 "a nonzero argument is unsafe", fndecl);
5076 /* For __builtin_frame_address, return what we've got. */
5077 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5078 return tem;
5080 if (!REG_P (tem)
5081 && ! CONSTANT_P (tem))
5082 tem = copy_addr_to_reg (tem);
5083 return tem;
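/* Illustrative uses (not from the source):

     void *fp = __builtin_frame_address (0);    // current frame
     void *ra = __builtin_return_address (1);   // caller's return address

   the second call gets the -Wframe-address warning above because
   frames beyond the current one may not be safely reachable.  */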
5087 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5088 failed and the caller should emit a normal call. */
5090 static rtx
5091 expand_builtin_alloca (tree exp)
5093 rtx op0;
5094 rtx result;
5095 unsigned int align;
5096 tree fndecl = get_callee_fndecl (exp);
5097 HOST_WIDE_INT max_size;
5098 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5099 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5100 bool valid_arglist
5101 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5102 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5103 VOID_TYPE)
5104 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5105 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5106 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5108 if (!valid_arglist)
5109 return NULL_RTX;
5111 if ((alloca_for_var
5112 && warn_vla_limit >= HOST_WIDE_INT_MAX
5113 && warn_alloc_size_limit < warn_vla_limit)
5114 || (!alloca_for_var
5115 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5116 && warn_alloc_size_limit < warn_alloca_limit
5119 /* -Walloca-larger-than and -Wvla-larger-than settings of
5120 less than HOST_WIDE_INT_MAX override the more general
5121 -Walloc-size-larger-than so unless either of the former
5122 options is smaller than the last one (which would imply
5123 that the call was already checked), check the alloca
5124 arguments for overflow. */
5125 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5126 int idx[] = { 0, -1 };
5127 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5130 /* Compute the argument. */
5131 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5133 /* Compute the alignment. */
5134 align = (fcode == BUILT_IN_ALLOCA
5135 ? BIGGEST_ALIGNMENT
5136 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5138 /* Compute the maximum size. */
5139 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5140 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5141 : -1);
5143 /* Allocate the desired space. If the allocation stems from the declaration
5144 of a variable-sized object, it cannot accumulate. */
5145 result
5146 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5147 result = convert_memory_address (ptr_mode, result);
5149 return result;
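/* The three accepted argument lists (a sketch; ALIGN is a constant
   alignment in bits and MAX a constant upper bound on the size):

     __builtin_alloca (n)
     __builtin_alloca_with_align (n, align)
     __builtin_alloca_with_align_and_max (n, align, max)  */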
5152 /* Emit a call to __asan_allocas_unpoison for EXP. Add to its second
5153 argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5154 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment
5155 on the handle_builtin_stack_restore function. */
5157 static rtx
5158 expand_asan_emit_allocas_unpoison (tree exp)
5160 tree arg0 = CALL_EXPR_ARG (exp, 0);
5161 tree arg1 = CALL_EXPR_ARG (exp, 1);
5162 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5163 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5164 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5165 stack_pointer_rtx, NULL_RTX, 0,
5166 OPTAB_LIB_WIDEN);
5167 off = convert_modes (ptr_mode, Pmode, off, 0);
5168 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5169 OPTAB_LIB_WIDEN);
5170 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5171 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5172 top, ptr_mode, bot, ptr_mode);
5173 return ret;
5176 /* Expand a call to the bswap builtin in EXP.
5177 Return NULL_RTX if a normal call should be emitted rather than expanding the
5178 function in-line. If convenient, the result should be placed in TARGET.
5179 SUBTARGET may be used as the target for computing one of EXP's operands. */
5181 static rtx
5182 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5183 rtx subtarget)
5185 tree arg;
5186 rtx op0;
5188 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5189 return NULL_RTX;
5191 arg = CALL_EXPR_ARG (exp, 0);
5192 op0 = expand_expr (arg,
5193 subtarget && GET_MODE (subtarget) == target_mode
5194 ? subtarget : NULL_RTX,
5195 target_mode, EXPAND_NORMAL);
5196 if (GET_MODE (op0) != target_mode)
5197 op0 = convert_to_mode (target_mode, op0, 1);
5199 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5201 gcc_assert (target);
5203 return convert_to_mode (target_mode, target, 1);
5206 /* Expand a call to a unary builtin in EXP.
5207 Return NULL_RTX if a normal call should be emitted rather than expanding the
5208 function in-line. If convenient, the result should be placed in TARGET.
5209 SUBTARGET may be used as the target for computing one of EXP's operands. */
5211 static rtx
5212 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5213 rtx subtarget, optab op_optab)
5215 rtx op0;
5217 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5218 return NULL_RTX;
5220 /* Compute the argument. */
5221 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5222 (subtarget
5223 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5224 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5225 VOIDmode, EXPAND_NORMAL);
5226 /* Compute op, into TARGET if possible.
5227 Set TARGET to wherever the result comes back. */
5228 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5229 op_optab, op0, target, op_optab != clrsb_optab);
5230 gcc_assert (target);
5232 return convert_to_mode (target_mode, target, 0);
5235 /* Expand a call to __builtin_expect. We just return our argument
5236 as the builtin_expect semantics should already have been applied by
5237 the tree branch prediction pass. */
5239 static rtx
5240 expand_builtin_expect (tree exp, rtx target)
5242 tree arg;
5244 if (call_expr_nargs (exp) < 2)
5245 return const0_rtx;
5246 arg = CALL_EXPR_ARG (exp, 0);
5248 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5249 /* When guessing was done, the hints should be already stripped away. */
5250 gcc_assert (!flag_guess_branch_prob
5251 || optimize == 0 || seen_error ());
5252 return target;
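/* Purely illustrative use of the hint this expansion discards:

     if (__builtin_expect (fd < 0, 0))
       return -1;

   marks the error path as unlikely.  By the time we get here the
   probability has already been recorded on the branch, so only the
   first argument is expanded.  */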
5255 /* Expand a call to __builtin_expect_with_probability. We just return our
5256 first argument as the hint's semantics have already been applied by the
5257 tree branch prediction pass. */
5259 static rtx
5260 expand_builtin_expect_with_probability (tree exp, rtx target)
5262 tree arg;
5264 if (call_expr_nargs (exp) < 3)
5265 return const0_rtx;
5266 arg = CALL_EXPR_ARG (exp, 0);
5268 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5269 /* When guessing was done, the hints should be already stripped away. */
5270 gcc_assert (!flag_guess_branch_prob
5271 || optimize == 0 || seen_error ());
5272 return target;
5276 /* Expand a call to __builtin_assume_aligned. We just return our first
5277 argument as the builtin_assume_aligned semantics have already been
5278 applied by CCP. */
5280 static rtx
5281 expand_builtin_assume_aligned (tree exp, rtx target)
5283 if (call_expr_nargs (exp) < 2)
5284 return const0_rtx;
5285 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5286 EXPAND_NORMAL);
5287 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5288 && (call_expr_nargs (exp) < 3
5289 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5290 return target;
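/* Illustrative use:

     double *q = __builtin_assume_aligned (p, 32);

   The 32-byte alignment guarantee was consumed earlier by CCP; at this
   point we merely return the first argument.  */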
5293 void
5294 expand_builtin_trap (void)
5296 if (targetm.have_trap ())
5298 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5299 /* For trap insns when not accumulating outgoing args force
5300 REG_ARGS_SIZE note to prevent crossjumping of calls with
5301 different args sizes. */
5302 if (!ACCUMULATE_OUTGOING_ARGS)
5303 add_args_size_note (insn, stack_pointer_delta);
5305 else
5307 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5308 tree call_expr = build_call_expr (fn, 0);
5309 expand_call (call_expr, NULL_RTX, false);
5312 emit_barrier ();
5315 /* Expand a call to __builtin_unreachable. We do nothing except emit
5316 a barrier saying that control flow will not pass here.
5318 It is the responsibility of the program being compiled to ensure
5319 that control flow never reaches __builtin_unreachable. */
5320 static void
5321 expand_builtin_unreachable (void)
5323 emit_barrier ();
5326 /* Expand EXP, a call to fabs, fabsf or fabsl.
5327 Return NULL_RTX if a normal call should be emitted rather than expanding
5328 the function inline. If convenient, the result should be placed
5329 in TARGET. SUBTARGET may be used as the target for computing
5330 the operand. */
5332 static rtx
5333 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5335 machine_mode mode;
5336 tree arg;
5337 rtx op0;
5339 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5340 return NULL_RTX;
5342 arg = CALL_EXPR_ARG (exp, 0);
5343 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5344 mode = TYPE_MODE (TREE_TYPE (arg));
5345 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5346 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5349 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5350 Return NULL_RTX if a normal call should be emitted rather than expanding the
5351 function inline. If convenient, the result should be placed in TARGET.
5352 SUBTARGET may be used as the target for computing the operand. */
5354 static rtx
5355 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5357 rtx op0, op1;
5358 tree arg;
5360 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5361 return NULL_RTX;
5363 arg = CALL_EXPR_ARG (exp, 0);
5364 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5366 arg = CALL_EXPR_ARG (exp, 1);
5367 op1 = expand_normal (arg);
5369 return expand_copysign (op0, op1, target);
5372 /* Expand a call to __builtin___clear_cache. */
5374 static rtx
5375 expand_builtin___clear_cache (tree exp)
5377 if (!targetm.code_for_clear_cache)
5379 #ifdef CLEAR_INSN_CACHE
5380 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5381 does something. Just do the default expansion to a call to
5382 __clear_cache(). */
5383 return NULL_RTX;
5384 #else
5385 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5386 does nothing. There is no need to call it. Do nothing. */
5387 return const0_rtx;
5388 #endif /* CLEAR_INSN_CACHE */
5391 /* We have a "clear_cache" insn, and it will handle everything. */
5392 tree begin, end;
5393 rtx begin_rtx, end_rtx;
5395 /* We must not expand to a library call. If we did, any
5396 fallback library function in libgcc that might contain a call to
5397 __builtin___clear_cache() would recurse infinitely. */
5398 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5400 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5401 return const0_rtx;
5404 if (targetm.have_clear_cache ())
5406 struct expand_operand ops[2];
5408 begin = CALL_EXPR_ARG (exp, 0);
5409 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5411 end = CALL_EXPR_ARG (exp, 1);
5412 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5414 create_address_operand (&ops[0], begin_rtx);
5415 create_address_operand (&ops[1], end_rtx);
5416 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5417 return const0_rtx;
5419 return const0_rtx;
5422 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5424 static rtx
5425 round_trampoline_addr (rtx tramp)
5427 rtx temp, addend, mask;
5429 /* If we don't need too much alignment, we'll have been guaranteed
5430 proper alignment by get_trampoline_type. */
5431 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5432 return tramp;
5434 /* Round address up to desired boundary. */
5435 temp = gen_reg_rtx (Pmode);
5436 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5437 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5439 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5440 temp, 0, OPTAB_LIB_WIDEN);
5441 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5442 temp, 0, OPTAB_LIB_WIDEN);
5444 return tramp;
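/* Worked example: with TRAMPOLINE_ALIGNMENT of 64 bits the sequence
   above computes (tramp + 7) & -8, rounding the address up to the
   next 8-byte boundary.  */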
5447 static rtx
5448 expand_builtin_init_trampoline (tree exp, bool onstack)
5450 tree t_tramp, t_func, t_chain;
5451 rtx m_tramp, r_tramp, r_chain, tmp;
5453 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5454 POINTER_TYPE, VOID_TYPE))
5455 return NULL_RTX;
5457 t_tramp = CALL_EXPR_ARG (exp, 0);
5458 t_func = CALL_EXPR_ARG (exp, 1);
5459 t_chain = CALL_EXPR_ARG (exp, 2);
5461 r_tramp = expand_normal (t_tramp);
5462 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5463 MEM_NOTRAP_P (m_tramp) = 1;
5465 /* If ONSTACK, the TRAMP argument should be the address of a field
5466 within the local function's FRAME decl. Either way, let's see if
5467 we can fill in the MEM_ATTRs for this memory. */
5468 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5469 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5471 /* Creator of a heap trampoline is responsible for making sure the
5472 address is aligned to at least STACK_BOUNDARY. Normally malloc
5473 will ensure this anyhow. */
5474 tmp = round_trampoline_addr (r_tramp);
5475 if (tmp != r_tramp)
5477 m_tramp = change_address (m_tramp, BLKmode, tmp);
5478 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5479 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5482 /* The FUNC argument should be the address of the nested function.
5483 Extract the actual function decl to pass to the hook. */
5484 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5485 t_func = TREE_OPERAND (t_func, 0);
5486 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5488 r_chain = expand_normal (t_chain);
5490 /* Generate insns to initialize the trampoline. */
5491 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5493 if (onstack)
5495 trampolines_created = 1;
5497 if (targetm.calls.custom_function_descriptors != 0)
5498 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5499 "trampoline generated for nested function %qD", t_func);
5502 return const0_rtx;
5505 static rtx
5506 expand_builtin_adjust_trampoline (tree exp)
5508 rtx tramp;
5510 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5511 return NULL_RTX;
5513 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5514 tramp = round_trampoline_addr (tramp);
5515 if (targetm.calls.trampoline_adjust_address)
5516 tramp = targetm.calls.trampoline_adjust_address (tramp);
5518 return tramp;
5521 /* Expand a call to the builtin descriptor initialization routine.
5522 A descriptor is made up of a pair of pointers: the static
5523 chain and the code entry, in that order. */
5525 static rtx
5526 expand_builtin_init_descriptor (tree exp)
5528 tree t_descr, t_func, t_chain;
5529 rtx m_descr, r_descr, r_func, r_chain;
5531 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5532 VOID_TYPE))
5533 return NULL_RTX;
5535 t_descr = CALL_EXPR_ARG (exp, 0);
5536 t_func = CALL_EXPR_ARG (exp, 1);
5537 t_chain = CALL_EXPR_ARG (exp, 2);
5539 r_descr = expand_normal (t_descr);
5540 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5541 MEM_NOTRAP_P (m_descr) = 1;
5543 r_func = expand_normal (t_func);
5544 r_chain = expand_normal (t_chain);
5546 /* Generate insns to initialize the descriptor. */
5547 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5548 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5549 POINTER_SIZE / BITS_PER_UNIT), r_func);
5551 return const0_rtx;
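/* The descriptor initialized above thus has the layout

     descr[0]:  static chain value
     descr[1]:  code entry point

   with each slot being POINTER_SIZE bits wide, as stored by the two
   moves above.  */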
5554 /* Expand a call to the builtin descriptor adjustment routine. */
5556 static rtx
5557 expand_builtin_adjust_descriptor (tree exp)
5559 rtx tramp;
5561 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5562 return NULL_RTX;
5564 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5566 /* Unalign the descriptor to allow runtime identification. */
5567 tramp = plus_constant (ptr_mode, tramp,
5568 targetm.calls.custom_function_descriptors);
5570 return force_operand (tramp, NULL_RTX);
5573 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5574 function. The function first checks whether the back end provides
5575 an insn to implement signbit for the respective mode. If not, it
5576 checks whether the floating point format of the value is such that
5577 the sign bit can be extracted. If that is not the case, error out.
5578 EXP is the expression that is a call to the builtin function; if
5579 convenient, the result should be placed in TARGET. */
5580 static rtx
5581 expand_builtin_signbit (tree exp, rtx target)
5583 const struct real_format *fmt;
5584 scalar_float_mode fmode;
5585 scalar_int_mode rmode, imode;
5586 tree arg;
5587 int word, bitpos;
5588 enum insn_code icode;
5589 rtx temp;
5590 location_t loc = EXPR_LOCATION (exp);
5592 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5593 return NULL_RTX;
5595 arg = CALL_EXPR_ARG (exp, 0);
5596 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5597 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5598 fmt = REAL_MODE_FORMAT (fmode);
5600 arg = builtin_save_expr (arg);
5602 /* Expand the argument yielding an RTX expression. */
5603 temp = expand_normal (arg);
5605 /* Check if the back end provides an insn that handles signbit for the
5606 argument's mode. */
5607 icode = optab_handler (signbit_optab, fmode);
5608 if (icode != CODE_FOR_nothing)
5610 rtx_insn *last = get_last_insn ();
5611 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5612 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5613 return target;
5614 delete_insns_since (last);
5617 /* For floating point formats without a sign bit, implement signbit
5618 as "ARG < 0.0". */
5619 bitpos = fmt->signbit_ro;
5620 if (bitpos < 0)
5622 /* But we can't do this if the format supports signed zero. */
5623 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5625 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5626 build_real (TREE_TYPE (arg), dconst0));
5627 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5630 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5632 imode = int_mode_for_mode (fmode).require ();
5633 temp = gen_lowpart (imode, temp);
5635 else
5637 imode = word_mode;
5638 /* Handle targets with different FP word orders. */
5639 if (FLOAT_WORDS_BIG_ENDIAN)
5640 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5641 else
5642 word = bitpos / BITS_PER_WORD;
5643 temp = operand_subword_force (temp, word, fmode);
5644 bitpos = bitpos % BITS_PER_WORD;
5647 /* Force the intermediate word_mode (or narrower) result into a
5648 register. This avoids attempting to create paradoxical SUBREGs
5649 of floating point modes below. */
5650 temp = force_reg (imode, temp);
5652 /* If the bitpos is within the "result mode" lowpart, the operation
5653 can be implemented with a single bitwise AND. Otherwise, we need
5654 a right shift and an AND. */
5656 if (bitpos < GET_MODE_BITSIZE (rmode))
5658 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5660 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5661 temp = gen_lowpart (rmode, temp);
5662 temp = expand_binop (rmode, and_optab, temp,
5663 immed_wide_int_const (mask, rmode),
5664 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5666 else
5668 /* Perform a logical right shift to place the signbit in the least
5669 significant bit, then truncate the result to the desired mode
5670 and mask just this bit. */
5671 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5672 temp = gen_lowpart (rmode, temp);
5673 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5674 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5677 return temp;
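/* Two concrete cases of the fallback paths above: for IEEE single
   precision with a 32-bit int result the sign is bit 31, which fits in
   RMODE, so a single AND with 0x80000000 suffices (any nonzero value
   is a valid signbit result).  For IEEE double on a 64-bit target the
   sign is bit 63, outside a 32-bit RMODE, so we emit the equivalent of
   (x >> 63) & 1.  */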
5680 /* Expand fork or exec calls. TARGET is the desired target of the
5681 call. EXP is the call. FN is the
5682 identifier of the actual function. IGNORE is nonzero if the
5683 value is to be ignored. */
5685 static rtx
5686 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5688 tree id, decl;
5689 tree call;
5691 /* If we are not profiling, just call the function. */
5692 if (!profile_arc_flag)
5693 return NULL_RTX;
5695 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5696 compiler, so the code does not diverge, and the wrapper may run the
5697 code necessary for keeping the profiling sane. */
5699 switch (DECL_FUNCTION_CODE (fn))
5701 case BUILT_IN_FORK:
5702 id = get_identifier ("__gcov_fork");
5703 break;
5705 case BUILT_IN_EXECL:
5706 id = get_identifier ("__gcov_execl");
5707 break;
5709 case BUILT_IN_EXECV:
5710 id = get_identifier ("__gcov_execv");
5711 break;
5713 case BUILT_IN_EXECLP:
5714 id = get_identifier ("__gcov_execlp");
5715 break;
5717 case BUILT_IN_EXECLE:
5718 id = get_identifier ("__gcov_execle");
5719 break;
5721 case BUILT_IN_EXECVP:
5722 id = get_identifier ("__gcov_execvp");
5723 break;
5725 case BUILT_IN_EXECVE:
5726 id = get_identifier ("__gcov_execve");
5727 break;
5729 default:
5730 gcc_unreachable ();
5733 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5734 FUNCTION_DECL, id, TREE_TYPE (fn));
5735 DECL_EXTERNAL (decl) = 1;
5736 TREE_PUBLIC (decl) = 1;
5737 DECL_ARTIFICIAL (decl) = 1;
5738 TREE_NOTHROW (decl) = 1;
5739 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5740 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5741 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5742 return expand_call (call, target, ignore);
5747 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5748 the pointer in these functions is void*, the tree optimizers may remove
5749 casts. The mode computed in expand_builtin isn't reliable either, due
5750 to __sync_bool_compare_and_swap.
5752 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5753 group of builtins. This gives us log2 of the mode size. */
5755 static inline machine_mode
5756 get_builtin_sync_mode (int fcode_diff)
5758 /* The size is not negotiable, so ask not to get BLKmode in return
5759 if the target indicates that a smaller size would be better. */
5760 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
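/* For example, __sync_fetch_and_add_4 has FCODE_DIFF 2, so we ask for
   an integer mode of BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on
   typical targets.  */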
5763 /* Expand the memory expression LOC and return the appropriate memory operand
5764 for the builtin_sync operations. */
5766 static rtx
5767 get_builtin_sync_mem (tree loc, machine_mode mode)
5769 rtx addr, mem;
5771 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5772 addr = convert_memory_address (Pmode, addr);
5774 /* Note that we explicitly do not want any alias information for this
5775 memory, so that we kill all other live memories. Otherwise we don't
5776 satisfy the full barrier semantics of the intrinsic. */
5777 mem = validize_mem (gen_rtx_MEM (mode, addr));
5779 /* The alignment recorded must be at least that required by the mode. */
5780 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5781 get_pointer_alignment (loc)));
5782 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5783 MEM_VOLATILE_P (mem) = 1;
5785 return mem;
5788 /* Make sure an argument is in the right mode.
5789 EXP is the tree argument.
5790 MODE is the mode it should be in. */
5792 static rtx
5793 expand_expr_force_mode (tree exp, machine_mode mode)
5795 rtx val;
5796 machine_mode old_mode;
5798 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5799 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5800 of CONST_INTs, where we know the old_mode only from the call argument. */
5802 old_mode = GET_MODE (val);
5803 if (old_mode == VOIDmode)
5804 old_mode = TYPE_MODE (TREE_TYPE (exp));
5805 val = convert_modes (mode, old_mode, val, 1);
5806 return val;
5810 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5811 EXP is the CALL_EXPR. CODE is the rtx code
5812 that corresponds to the arithmetic or logical operation from the name;
5813 an exception here is that NOT actually means NAND. TARGET is an optional
5814 place for us to store the results; AFTER is true if this is the
5815 fetch_and_xxx form. */
5817 static rtx
5818 expand_builtin_sync_operation (machine_mode mode, tree exp,
5819 enum rtx_code code, bool after,
5820 rtx target)
5822 rtx val, mem;
5823 location_t loc = EXPR_LOCATION (exp);
5825 if (code == NOT && warn_sync_nand)
5827 tree fndecl = get_callee_fndecl (exp);
5828 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5830 static bool warned_f_a_n, warned_n_a_f;
5832 switch (fcode)
5834 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5835 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5836 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5837 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5838 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5839 if (warned_f_a_n)
5840 break;
5842 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5843 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5844 warned_f_a_n = true;
5845 break;
5847 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5848 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5849 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5850 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5851 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5852 if (warned_n_a_f)
5853 break;
5855 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5856 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5857 warned_n_a_f = true;
5858 break;
5860 default:
5861 gcc_unreachable ();
5865 /* Expand the operands. */
5866 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5867 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5869 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5870 after);
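/* The GCC 4.4 semantics the warning above refers to: since 4.4,
   __sync_fetch_and_nand performs

     tmp = *ptr; *ptr = ~(tmp & value); return tmp;

   whereas earlier releases computed ~tmp & value.  */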
5873 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5874 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5875 true if this is the boolean form. TARGET is a place for us to store the
5876 results; this is NOT optional if IS_BOOL is true. */
5878 static rtx
5879 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5880 bool is_bool, rtx target)
5882 rtx old_val, new_val, mem;
5883 rtx *pbool, *poval;
5885 /* Expand the operands. */
5886 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5887 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5888 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5890 pbool = poval = NULL;
5891 if (target != const0_rtx)
5893 if (is_bool)
5894 pbool = &target;
5895 else
5896 poval = &target;
5898 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5899 false, MEMMODEL_SYNC_SEQ_CST,
5900 MEMMODEL_SYNC_SEQ_CST))
5901 return NULL_RTX;
5903 return target;
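/* Illustrative uses of the two forms handled here:

     old = __sync_val_compare_and_swap (&x, expected, newval);
     ok  = __sync_bool_compare_and_swap (&x, expected, newval);

   Both expand to the same atomic compare-and-swap; they differ only in
   whether the old value or the success flag is captured.  */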
5906 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5907 general form is actually an atomic exchange, and some targets only
5908 support a reduced form with the second argument being a constant 1.
5909 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5910 the results. */
5912 static rtx
5913 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5914 rtx target)
5916 rtx val, mem;
5918 /* Expand the operands. */
5919 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5920 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5922 return expand_sync_lock_test_and_set (target, mem, val);
5925 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5927 static void
5928 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5930 rtx mem;
5932 /* Expand the operands. */
5933 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5935 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5938 /* Given an integer representing an ``enum memmodel'', verify its
5939 correctness and return the memory model enum. */
5941 static enum memmodel
5942 get_memmodel (tree exp)
5944 rtx op;
5945 unsigned HOST_WIDE_INT val;
5946 source_location loc
5947 = expansion_point_location_if_in_system_header (input_location);
5949 /* If the parameter is not a constant, it's a run time value so we'll just
5950 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5951 if (TREE_CODE (exp) != INTEGER_CST)
5952 return MEMMODEL_SEQ_CST;
5954 op = expand_normal (exp);
5956 val = INTVAL (op);
5957 if (targetm.memmodel_check)
5958 val = targetm.memmodel_check (val);
5959 else if (val & ~MEMMODEL_MASK)
5961 warning_at (loc, OPT_Winvalid_memory_model,
5962 "unknown architecture specifier in memory model to builtin");
5963 return MEMMODEL_SEQ_CST;
5966 /* We should never see an explicit user SYNC memmodel, so >= LAST works. */
5967 if (memmodel_base (val) >= MEMMODEL_LAST)
5969 warning_at (loc, OPT_Winvalid_memory_model,
5970 "invalid memory model argument to builtin");
5971 return MEMMODEL_SEQ_CST;
5974 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5975 be conservative and promote consume to acquire. */
5976 if (val == MEMMODEL_CONSUME)
5977 val = MEMMODEL_ACQUIRE;
5979 return (enum memmodel) val;
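/* For instance, a call such as __atomic_load_n (p, __ATOMIC_CONSUME)
   reaches the expanders as if __ATOMIC_ACQUIRE had been written, and
   any out-of-range constant degrades to __ATOMIC_SEQ_CST after the
   warnings above.  */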
5982 /* Expand the __atomic_exchange intrinsic:
5983 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5984 EXP is the CALL_EXPR.
5985 TARGET is an optional place for us to store the results. */
5987 static rtx
5988 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5990 rtx val, mem;
5991 enum memmodel model;
5993 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5995 if (!flag_inline_atomics)
5996 return NULL_RTX;
5998 /* Expand the operands. */
5999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6000 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6002 return expand_atomic_exchange (target, mem, val, model);
6005 /* Expand the __atomic_compare_exchange intrinsic:
6006 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6007 TYPE desired, BOOL weak,
6008 enum memmodel success,
6009 enum memmodel failure)
6010 EXP is the CALL_EXPR.
6011 TARGET is an optional place for us to store the results. */
6013 static rtx
6014 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6015 rtx target)
6017 rtx expect, desired, mem, oldval;
6018 rtx_code_label *label;
6019 enum memmodel success, failure;
6020 tree weak;
6021 bool is_weak;
6022 source_location loc
6023 = expansion_point_location_if_in_system_header (input_location);
6025 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6026 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6028 if (failure > success)
6030 warning_at (loc, OPT_Winvalid_memory_model,
6031 "failure memory model cannot be stronger than success "
6032 "memory model for %<__atomic_compare_exchange%>");
6033 success = MEMMODEL_SEQ_CST;
6036 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6038 warning_at (loc, OPT_Winvalid_memory_model,
6039 "invalid failure memory model for "
6040 "%<__atomic_compare_exchange%>");
6041 failure = MEMMODEL_SEQ_CST;
6042 success = MEMMODEL_SEQ_CST;
6046 if (!flag_inline_atomics)
6047 return NULL_RTX;
6049 /* Expand the operands. */
6050 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6052 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6053 expect = convert_memory_address (Pmode, expect);
6054 expect = gen_rtx_MEM (mode, expect);
6055 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6057 weak = CALL_EXPR_ARG (exp, 3);
6058 is_weak = false;
6059 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6060 is_weak = true;
6062 if (target == const0_rtx)
6063 target = NULL;
6065 /* Lest the rtl backend create a race condition with an improper store
6066 to memory, always create a new pseudo for OLDVAL. */
6067 oldval = NULL;
6069 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6070 is_weak, success, failure))
6071 return NULL_RTX;
6073 /* Conditionally store back to EXPECT, lest we create a race condition
6074 with an improper store to memory. */
6075 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6076 the normal case where EXPECT is totally private, i.e. a register. At
6077 which point the store can be unconditional. */
6078 label = gen_label_rtx ();
6079 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6080 GET_MODE (target), 1, label);
6081 emit_move_insn (expect, oldval);
6082 emit_label (label);
6084 return target;
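/* In pseudo-code, the sequence emitted above is

     target = CAS (mem, expect, desired);
     if (!target)
       *expect = oldval;

   i.e. EXPECT is rewritten with the observed value only when the
   exchange failed, matching the C11 semantics.  */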
6087 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6088 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6089 call. The weak parameter must be dropped to match the expected parameter
6090 list and the expected argument changed from value to pointer to memory
6091 slot. */
6093 static void
6094 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6096 unsigned int z;
6097 vec<tree, va_gc> *vec;
6099 vec_alloc (vec, 5);
6100 vec->quick_push (gimple_call_arg (call, 0));
6101 tree expected = gimple_call_arg (call, 1);
6102 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6103 TREE_TYPE (expected));
6104 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6105 if (expd != x)
6106 emit_move_insn (x, expd);
6107 tree v = make_tree (TREE_TYPE (expected), x);
6108 vec->quick_push (build1 (ADDR_EXPR,
6109 build_pointer_type (TREE_TYPE (expected)), v));
6110 vec->quick_push (gimple_call_arg (call, 2));
6111 /* Skip the boolean weak parameter. */
6112 for (z = 4; z < 6; z++)
6113 vec->quick_push (gimple_call_arg (call, z));
6114 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6115 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6116 gcc_assert (bytes_log2 < 5);
6117 built_in_function fncode
6118 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6119 + bytes_log2);
6120 tree fndecl = builtin_decl_explicit (fncode);
6121 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6122 fndecl);
6123 tree exp = build_call_vec (boolean_type_node, fn, vec);
6124 tree lhs = gimple_call_lhs (call);
6125 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6126 if (lhs)
6128 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6129 if (GET_MODE (boolret) != mode)
6130 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6131 x = force_reg (mode, x);
6132 write_complex_part (target, boolret, true);
6133 write_complex_part (target, x, false);
6137 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6139 void
6140 expand_ifn_atomic_compare_exchange (gcall *call)
6142 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6143 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6144 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6145 rtx expect, desired, mem, oldval, boolret;
6146 enum memmodel success, failure;
6147 tree lhs;
6148 bool is_weak;
6149 source_location loc
6150 = expansion_point_location_if_in_system_header (gimple_location (call));
6152 success = get_memmodel (gimple_call_arg (call, 4));
6153 failure = get_memmodel (gimple_call_arg (call, 5));
6155 if (failure > success)
6157 warning_at (loc, OPT_Winvalid_memory_model,
6158 "failure memory model cannot be stronger than success "
6159 "memory model for %<__atomic_compare_exchange%>");
6160 success = MEMMODEL_SEQ_CST;
6163 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6165 warning_at (loc, OPT_Winvalid_memory_model,
6166 "invalid failure memory model for "
6167 "%<__atomic_compare_exchange%>");
6168 failure = MEMMODEL_SEQ_CST;
6169 success = MEMMODEL_SEQ_CST;
6172 if (!flag_inline_atomics)
6174 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6175 return;
6178 /* Expand the operands. */
6179 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6181 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6182 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6184 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6186 boolret = NULL;
6187 oldval = NULL;
6189 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6190 is_weak, success, failure))
6192 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6193 return;
6196 lhs = gimple_call_lhs (call);
6197 if (lhs)
6199 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6200 if (GET_MODE (boolret) != mode)
6201 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6202 write_complex_part (target, boolret, true);
6203 write_complex_part (target, oldval, false);
6207 /* Expand the __atomic_load intrinsic:
6208 TYPE __atomic_load (TYPE *object, enum memmodel)
6209 EXP is the CALL_EXPR.
6210 TARGET is an optional place for us to store the results. */
6212 static rtx
6213 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6215 rtx mem;
6216 enum memmodel model;
6218 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6219 if (is_mm_release (model) || is_mm_acq_rel (model))
6221 source_location loc
6222 = expansion_point_location_if_in_system_header (input_location);
6223 warning_at (loc, OPT_Winvalid_memory_model,
6224 "invalid memory model for %<__atomic_load%>");
6225 model = MEMMODEL_SEQ_CST;
6228 if (!flag_inline_atomics)
6229 return NULL_RTX;
6231 /* Expand the operand. */
6232 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6234 return expand_atomic_load (target, mem, model);
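/* For example, __atomic_load_n (p, __ATOMIC_RELEASE) is diagnosed by
   the check above and demoted to __ATOMIC_SEQ_CST, since release
   ordering is meaningless for a pure load.  */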
6238 /* Expand the __atomic_store intrinsic:
6239 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6240 EXP is the CALL_EXPR.
6241 TARGET is an optional place for us to store the results. */
6243 static rtx
6244 expand_builtin_atomic_store (machine_mode mode, tree exp)
6246 rtx mem, val;
6247 enum memmodel model;
6249 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6250 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6251 || is_mm_release (model)))
6253 source_location loc
6254 = expansion_point_location_if_in_system_header (input_location);
6255 warning_at (loc, OPT_Winvalid_memory_model,
6256 "invalid memory model for %<__atomic_store%>");
6257 model = MEMMODEL_SEQ_CST;
6260 if (!flag_inline_atomics)
6261 return NULL_RTX;
6263 /* Expand the operands. */
6264 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6265 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6267 return expand_atomic_store (mem, val, model, false);
6270 /* Expand the __atomic_fetch_XXX intrinsic:
6271 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6272 EXP is the CALL_EXPR.
6273 TARGET is an optional place for us to store the results.
6274 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6275 FETCH_AFTER is true if returning the result of the operation.
6276 FETCH_AFTER is false if returning the value before the operation.
6277 IGNORE is true if the result is not used.
6278 EXT_CALL is the correct builtin for an external call if this cannot be
6279 resolved to an instruction sequence. */
6281 static rtx
6282 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6283 enum rtx_code code, bool fetch_after,
6284 bool ignore, enum built_in_function ext_call)
6286 rtx val, mem, ret;
6287 enum memmodel model;
6288 tree fndecl;
6289 tree addr;
6291 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6293 /* Expand the operands. */
6294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6295 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6297 /* Only try generating instructions if inlining is turned on. */
6298 if (flag_inline_atomics)
6300 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6301 if (ret)
6302 return ret;
6305 /* Return if a different routine isn't needed for the library call. */
6306 if (ext_call == BUILT_IN_NONE)
6307 return NULL_RTX;
6309 /* Change the call to the specified function. */
6310 fndecl = get_callee_fndecl (exp);
6311 addr = CALL_EXPR_FN (exp);
6312 STRIP_NOPS (addr);
6314 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6315 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6317 /* If we will emit code after the call, the call cannot be a tail call.
6318 If it is emitted as a tail call, a barrier is emitted after it, and
6319 then all trailing code is removed. */
6320 if (!ignore)
6321 CALL_EXPR_TAILCALL (exp) = 0;
6323 /* Expand the call here so we can emit trailing code. */
6324 ret = expand_call (exp, target, ignore);
6326 /* Replace the original function just in case it matters. */
6327 TREE_OPERAND (addr, 0) = fndecl;
6329 /* Then issue the arithmetic correction to return the right result. */
6330 if (!ignore)
6332 if (code == NOT)
6334 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6335 OPTAB_LIB_WIDEN);
6336 ret = expand_simple_unop (mode, NOT, ret, target, true);
6338 else
6339 ret = expand_simple_binop (mode, code, ret, val, target, true,
6340 OPTAB_LIB_WIDEN);
6342 return ret;
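/* Example of the arithmetic correction above: if __atomic_add_fetch
   must fall back to the library's __atomic_fetch_add, the returned
   pre-operation value is fixed up as

     ret = ret + val;

   and for the NAND case as ret = ~(ret & val).  */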
6345 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6347 void
6348 expand_ifn_atomic_bit_test_and (gcall *call)
6350 tree ptr = gimple_call_arg (call, 0);
6351 tree bit = gimple_call_arg (call, 1);
6352 tree flag = gimple_call_arg (call, 2);
6353 tree lhs = gimple_call_lhs (call);
6354 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6355 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6356 enum rtx_code code;
6357 optab optab;
6358 struct expand_operand ops[5];
6360 gcc_assert (flag_inline_atomics);
6362 if (gimple_call_num_args (call) == 4)
6363 model = get_memmodel (gimple_call_arg (call, 3));
6365 rtx mem = get_builtin_sync_mem (ptr, mode);
6366 rtx val = expand_expr_force_mode (bit, mode);
6368 switch (gimple_call_internal_fn (call))
6370 case IFN_ATOMIC_BIT_TEST_AND_SET:
6371 code = IOR;
6372 optab = atomic_bit_test_and_set_optab;
6373 break;
6374 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6375 code = XOR;
6376 optab = atomic_bit_test_and_complement_optab;
6377 break;
6378 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6379 code = AND;
6380 optab = atomic_bit_test_and_reset_optab;
6381 break;
6382 default:
6383 gcc_unreachable ();
6386 if (lhs == NULL_TREE)
6388 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6389 val, NULL_RTX, true, OPTAB_DIRECT);
6390 if (code == AND)
6391 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6392 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6393 return;
6396 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6397 enum insn_code icode = direct_optab_handler (optab, mode);
6398 gcc_assert (icode != CODE_FOR_nothing);
6399 create_output_operand (&ops[0], target, mode);
6400 create_fixed_operand (&ops[1], mem);
6401 create_convert_operand_to (&ops[2], val, mode, true);
6402 create_integer_operand (&ops[3], model);
6403 create_integer_operand (&ops[4], integer_onep (flag));
6404 if (maybe_expand_insn (icode, 5, ops))
6405 return;
6407 rtx bitval = val;
6408 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6409 val, NULL_RTX, true, OPTAB_DIRECT);
6410 rtx maskval = val;
6411 if (code == AND)
6412 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6413 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6414 code, model, false);
6415 if (integer_onep (flag))
6417 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6418 NULL_RTX, true, OPTAB_DIRECT);
6419 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6420 true, OPTAB_DIRECT);
6422 else
6423 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6424 OPTAB_DIRECT);
6425 if (result != target)
6426 emit_move_insn (target, result);
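/* This internal function is typically created from a source pattern
   such as (illustrative only)

     mask = 1 << bit;
     if (__atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST) & mask)
       ...

   which the tree optimizers recognize as IFN_ATOMIC_BIT_TEST_AND_SET.  */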
6429 /* Expand an atomic clear operation.
6430 void _atomic_clear (BOOL *obj, enum memmodel)
6431 EXP is the call expression. */
6433 static rtx
6434 expand_builtin_atomic_clear (tree exp)
6436 machine_mode mode;
6437 rtx mem, ret;
6438 enum memmodel model;
6440 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6441 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6442 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6444 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6446 source_location loc
6447 = expansion_point_location_if_in_system_header (input_location);
6448 warning_at (loc, OPT_Winvalid_memory_model,
6449 "invalid memory model for %<__atomic_store%>");
6450 model = MEMMODEL_SEQ_CST;
6453 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6454 Failing that, a plain store is issued here. The only way this can
6455 fail is if the bool type is larger than a word size. Unlikely, but
6456 handle it anyway for completeness. Assume a single threaded model since
6457 there is no atomic support in this case, and no barriers are required. */
6458 ret = expand_atomic_store (mem, const0_rtx, model, true);
6459 if (!ret)
6460 emit_move_insn (mem, const0_rtx);
6461 return const0_rtx;
6464 /* Expand an atomic test_and_set operation.
6465 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6466 EXP is the call expression. */
6468 static rtx
6469 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6471 rtx mem;
6472 enum memmodel model;
6473 machine_mode mode;
6475 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6476 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6477 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6479 return expand_atomic_test_and_set (target, mem, model);
6483 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6484 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6486 static tree
6487 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6489 int size;
6490 machine_mode mode;
6491 unsigned int mode_align, type_align;
6493 if (TREE_CODE (arg0) != INTEGER_CST)
6494 return NULL_TREE;
6496 /* We need a corresponding integer mode for the access to be lock-free. */
6497 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6498 if (!int_mode_for_size (size, 0).exists (&mode))
6499 return boolean_false_node;
6501 mode_align = GET_MODE_ALIGNMENT (mode);
6503 if (TREE_CODE (arg1) == INTEGER_CST)
6505 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6507 /* Either this argument is null, or it's a fake pointer encoding
6508 the alignment of the object. */
6509 val = least_bit_hwi (val);
6510 val *= BITS_PER_UNIT;
6512 if (val == 0 || mode_align < val)
6513 type_align = mode_align;
6514 else
6515 type_align = val;
6517 else
6519 tree ttype = TREE_TYPE (arg1);
6521 /* This function is usually invoked and folded immediately by the front
6522 end before anything else has a chance to look at it. The pointer
6523 parameter at this point is usually cast to a void *, so check for that
6524 and look past the cast. */
6525 if (CONVERT_EXPR_P (arg1)
6526 && POINTER_TYPE_P (ttype)
6527 && VOID_TYPE_P (TREE_TYPE (ttype))
6528 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6529 arg1 = TREE_OPERAND (arg1, 0);
6531 ttype = TREE_TYPE (arg1);
6532 gcc_assert (POINTER_TYPE_P (ttype));
6534 /* Get the underlying type of the object. */
6535 ttype = TREE_TYPE (ttype);
6536 type_align = TYPE_ALIGN (ttype);
6539 /* If the object has smaller alignment, the lock free routines cannot
6540 be used. */
6541 if (type_align < mode_align)
6542 return boolean_false_node;
6544 /* Check if a compare_and_swap pattern exists for the mode which represents
6545 the required size. The pattern is not allowed to fail, so the existence
6546 of the pattern indicates support is present. Also require that an
6547 atomic load exists for the required size. */
6548 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6549 return boolean_true_node;
6550 else
6551 return boolean_false_node;
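/* For example, __atomic_always_lock_free (sizeof (int), 0) folds to
   true on targets that provide both a never-failing compare-and-swap
   pattern and an atomic load for the matching integer mode, and to
   false otherwise.  */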
6554 /* Return true if the parameters to call EXP represent an object which will
6555 always generate lock free instructions. The first argument represents the
6556 size of the object, and the second parameter is a pointer to the object
6557 itself. If NULL is passed for the object, then the result is based on
6558 typical alignment for an object of the specified size. Otherwise return
6559 false. */
6561 static rtx
6562 expand_builtin_atomic_always_lock_free (tree exp)
6564 tree size;
6565 tree arg0 = CALL_EXPR_ARG (exp, 0);
6566 tree arg1 = CALL_EXPR_ARG (exp, 1);
6568 if (TREE_CODE (arg0) != INTEGER_CST)
6570 error ("non-constant argument 1 to __atomic_always_lock_free");
6571 return const0_rtx;
6574 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6575 if (size == boolean_true_node)
6576 return const1_rtx;
6577 return const0_rtx;
6580 /* Return one or zero if it can be determined that the object ARG1 of size
6581 ARG0 is lock free on this architecture. */
6583 static tree
6584 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6586 if (!flag_inline_atomics)
6587 return NULL_TREE;
6589 /* If it isn't always lock free, don't generate a result. */
6590 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6591 return boolean_true_node;
6593 return NULL_TREE;
6596 /* Return true if the parameters to call EXP represent an object which will
6597 always generate lock free instructions. The first argument represents the
6598 size of the object, and the second parameter is a pointer to the object
6599 itself. If NULL is passed for the object, then the result is based on
6600 typical alignment for an object of the specified size. Otherwise return
6601 NULL. */
6603 static rtx
6604 expand_builtin_atomic_is_lock_free (tree exp)
6606 tree size;
6607 tree arg0 = CALL_EXPR_ARG (exp, 0);
6608 tree arg1 = CALL_EXPR_ARG (exp, 1);
6610 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6612 error ("non-integer argument 1 to __atomic_is_lock_free");
6613 return NULL_RTX;
6616 if (!flag_inline_atomics)
6617 return NULL_RTX;
6619 /* If the value is known at compile time, return the RTX for it. */
6620 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6621 if (size == boolean_true_node)
6622 return const1_rtx;
6624 return NULL_RTX;
6627 /* Expand the __atomic_thread_fence intrinsic:
6628 void __atomic_thread_fence (enum memmodel)
6629 EXP is the CALL_EXPR. */
6631 static void
6632 expand_builtin_atomic_thread_fence (tree exp)
6634 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6635 expand_mem_thread_fence (model);
6638 /* Expand the __atomic_signal_fence intrinsic:
6639 void __atomic_signal_fence (enum memmodel)
6640 EXP is the CALL_EXPR. */
6642 static void
6643 expand_builtin_atomic_signal_fence (tree exp)
6645 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6646 expand_mem_signal_fence (model);
6649 /* Expand the __sync_synchronize intrinsic. */
6651 static void
6652 expand_builtin_sync_synchronize (void)
6654 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6657 static rtx
6658 expand_builtin_thread_pointer (tree exp, rtx target)
6660 enum insn_code icode;
6661 if (!validate_arglist (exp, VOID_TYPE))
6662 return const0_rtx;
6663 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6664 if (icode != CODE_FOR_nothing)
6666 struct expand_operand op;
6667 /* If the target is not suitable then create a new target. */
6668 if (target == NULL_RTX
6669 || !REG_P (target)
6670 || GET_MODE (target) != Pmode)
6671 target = gen_reg_rtx (Pmode);
6672 create_output_operand (&op, target, Pmode);
6673 expand_insn (icode, 1, &op);
6674 return target;
6676 error ("__builtin_thread_pointer is not supported on this target");
6677 return const0_rtx;
6680 static void
6681 expand_builtin_set_thread_pointer (tree exp)
6683 enum insn_code icode;
6684 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6685 return;
6686 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6687 if (icode != CODE_FOR_nothing)
6689 struct expand_operand op;
6690 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6691 Pmode, EXPAND_NORMAL);
6692 create_input_operand (&op, val, Pmode);
6693 expand_insn (icode, 1, &op);
6694 return;
6696 error ("__builtin_set_thread_pointer is not supported on this target");
6700 /* Emit code to restore the current value of the stack. */
6702 static void
6703 expand_stack_restore (tree var)
6705 rtx_insn *prev;
6706 rtx sa = expand_normal (var);
6708 sa = convert_memory_address (Pmode, sa);
6710 prev = get_last_insn ();
6711 emit_stack_restore (SAVE_BLOCK, sa);
6713 record_new_stack_level ();
6715 fixup_args_size_notes (prev, get_last_insn (), 0);
6718 /* Emit code to save the current value of the stack. */
6720 static rtx
6721 expand_stack_save (void)
6723 rtx ret = NULL_RTX;
6725 emit_stack_save (SAVE_BLOCK, &ret);
6726 return ret;
6729 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6731 static rtx
6732 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6734 const char *name;
6735 rtx fallback_retval;
6736 rtx_insn *(*gen_fn) (rtx, rtx);
6737 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6739 case BUILT_IN_GOACC_PARLEVEL_ID:
6740 name = "__builtin_goacc_parlevel_id";
6741 fallback_retval = const0_rtx;
6742 gen_fn = targetm.gen_oacc_dim_pos;
6743 break;
6744 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6745 name = "__builtin_goacc_parlevel_size";
6746 fallback_retval = const1_rtx;
6747 gen_fn = targetm.gen_oacc_dim_size;
6748 break;
6749 default:
6750 gcc_unreachable ();
6753 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6755 error ("%qs only supported in OpenACC code", name);
6756 return const0_rtx;
6759 tree arg = CALL_EXPR_ARG (exp, 0);
6760 if (TREE_CODE (arg) != INTEGER_CST)
6762 error ("non-constant argument 0 to %qs", name);
6763 return const0_rtx;
6766 int dim = TREE_INT_CST_LOW (arg);
6767 switch (dim)
6769 case GOMP_DIM_GANG:
6770 case GOMP_DIM_WORKER:
6771 case GOMP_DIM_VECTOR:
6772 break;
6773 default:
6774 error ("illegal argument 0 to %qs", name);
6775 return const0_rtx;
6778 if (ignore)
6779 return target;
6781 if (target == NULL_RTX)
6782 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6784 if (!targetm.have_oacc_dim_size ())
6786 emit_move_insn (target, fallback_retval);
6787 return target;
6790 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6791 emit_insn (gen_fn (reg, GEN_INT (dim)));
6792 if (reg != target)
6793 emit_move_insn (target, reg);
6795 return target;
6798 /* Expand a string compare operation using a sequence of char comparisons
6799 to get rid of the calling overhead, with result going to TARGET if
6800 that's convenient.
6802 VAR_STR is the variable string source;
6803 CONST_STR is the constant string source;
6804 LENGTH is the number of chars to compare;
6805 CONST_STR_N indicates which source string is the constant string;
6806 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6808 The call is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
6810 target = (int) (unsigned char) var_str[0]
6811 - (int) (unsigned char) const_str[0];
6812 if (target != 0)
6813 goto ne_label;
6815 target = (int) (unsigned char) var_str[length - 2]
6816 - (int) (unsigned char) const_str[length - 2];
6817 if (target != 0)
6818 goto ne_label;
6819 target = (int) (unsigned char) var_str[length - 1]
6820 - (int) (unsigned char) const_str[length - 1];
6821 ne_label:
6824 static rtx
6825 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6826 unsigned HOST_WIDE_INT length,
6827 int const_str_n, machine_mode mode)
6829 HOST_WIDE_INT offset = 0;
6830 rtx var_rtx_array
6831 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6832 rtx var_rtx = NULL_RTX;
6833 rtx const_rtx = NULL_RTX;
6834 rtx result = target ? target : gen_reg_rtx (mode);
6835 rtx_code_label *ne_label = gen_label_rtx ();
6836 tree unit_type_node = unsigned_char_type_node;
6837 scalar_int_mode unit_mode
6838 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6840 start_sequence ();
6842 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6844 var_rtx
6845 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6846 const_rtx = c_readstr (const_str + offset, unit_mode);
6847 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6848 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6850 op0 = convert_modes (mode, unit_mode, op0, 1);
6851 op1 = convert_modes (mode, unit_mode, op1, 1);
6852 result = expand_simple_binop (mode, MINUS, op0, op1,
6853 result, 1, OPTAB_WIDEN);
6854 if (i < length - 1)
6855 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6856 mode, true, ne_label);
6857 offset += GET_MODE_SIZE (unit_mode);
6860 emit_label (ne_label);
6861 rtx_insn *insns = get_insns ();
6862 end_sequence ();
6863 emit_insn (insns);
6865 return result;
6868 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
6869 to TARGET if that's convenient.
6870 If the call cannot be inlined, return NULL_RTX. */
6871 static rtx
6872 inline_expand_builtin_string_cmp (tree exp, rtx target)
6874 tree fndecl = get_callee_fndecl (exp);
6875 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6876 unsigned HOST_WIDE_INT length = 0;
6877 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6879 /* Do NOT apply this inlining expansion when optimizing for size or
6880 at an optimization level below 2. */
6881 if (optimize < 2 || optimize_insn_for_size_p ())
6882 return NULL_RTX;
6884 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6885 || fcode == BUILT_IN_STRNCMP
6886 || fcode == BUILT_IN_MEMCMP);
6888 /* On a target where the type of the call (int) has the same or narrower
6889 precision than unsigned char, give up the inlining expansion. */
6890 if (TYPE_PRECISION (unsigned_char_type_node)
6891 >= TYPE_PRECISION (TREE_TYPE (exp)))
6892 return NULL_RTX;
6894 tree arg1 = CALL_EXPR_ARG (exp, 0);
6895 tree arg2 = CALL_EXPR_ARG (exp, 1);
6896 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6898 unsigned HOST_WIDE_INT len1 = 0;
6899 unsigned HOST_WIDE_INT len2 = 0;
6900 unsigned HOST_WIDE_INT len3 = 0;
6902 const char *src_str1 = c_getstr (arg1, &len1);
6903 const char *src_str2 = c_getstr (arg2, &len2);
6905 /* If neither string is a constant string, the call does not qualify. */
6906 if (!src_str1 && !src_str2)
6907 return NULL_RTX;
6909 /* For strncmp, if the length is not a constant, the call does not qualify. */
6910 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6911 return NULL_RTX;
6913 int const_str_n = 0;
6914 if (!len1)
6915 const_str_n = 2;
6916 else if (!len2)
6917 const_str_n = 1;
6918 else if (len2 > len1)
6919 const_str_n = 1;
6920 else
6921 const_str_n = 2;
6923 gcc_checking_assert (const_str_n > 0);
6924 length = (const_str_n == 1) ? len1 : len2;
6926 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6927 length = len3;
6929 /* If the length of the comparison is larger than the threshold,
6930 do nothing. */
6931 if (length > (unsigned HOST_WIDE_INT)
6932 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6933 return NULL_RTX;
6935 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6937 /* Now expand the call inline. */
6938 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6939 (const_str_n == 1) ? src_str1 : src_str2, length,
6940 const_str_n, mode);
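/* For instance, at -O2 a call like strcmp (s, "ab") qualifies for the
   inline expansion above, whereas strncmp (s, t, n) with no constant
   string argument, or with a non-constant length N, falls back to the
   library call.  */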
6943 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6944 represents the size of the first argument to that call, or VOIDmode
6945 if the argument is a pointer. IGNORE will be true if the result
6946 isn't used. */
6947 static rtx
6948 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6949 bool ignore)
6951 rtx val, failsafe;
6952 unsigned nargs = call_expr_nargs (exp);
6954 tree arg0 = CALL_EXPR_ARG (exp, 0);
6956 if (mode == VOIDmode)
6958 mode = TYPE_MODE (TREE_TYPE (arg0));
6959 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6962 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6964 /* An optional second argument can be used as a failsafe value on
6965 some machines. If it isn't present, then the failsafe value is
6966 assumed to be 0. */
6967 if (nargs > 1)
6969 tree arg1 = CALL_EXPR_ARG (exp, 1);
6970 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6972 else
6973 failsafe = const0_rtx;
6975 /* If the result isn't used, the behavior is undefined. It would be
6976 nice to emit a warning here, but path splitting means this might
6977 happen with legitimate code. So simply drop the builtin
6978 expansion in that case; we've handled any side-effects above. */
6979 if (ignore)
6980 return const0_rtx;
6982 /* If we don't have a suitable target, create one to hold the result. */
6983 if (target == NULL || GET_MODE (target) != mode)
6984 target = gen_reg_rtx (mode);
6986 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6987 val = convert_modes (mode, VOIDmode, val, false);
6989 return targetm.speculation_safe_value (mode, target, val, failsafe);
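/* An illustrative use: after a bounds check, sanitize the index so it
   cannot take an out-of-range value under speculative execution:

     if (i < len)
       val = array[__builtin_speculation_safe_value (i)];  */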
6992 /* Expand an expression EXP that calls a built-in function,
6993 with result going to TARGET if that's convenient
6994 (and in mode MODE if that's convenient).
6995 SUBTARGET may be used as the target for computing one of EXP's operands.
6996 IGNORE is nonzero if the value is to be ignored. */
6999 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7000 int ignore)
7002 tree fndecl = get_callee_fndecl (exp);
7003 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7004 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7005 int flags;
7007 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7008 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7010 /* When ASan is enabled, we don't want to expand some memory/string
7011 builtins and rely on libsanitizer's hooks. This allows us to avoid
7012 redundant checks and be sure that a possible overflow will be detected
7013 by ASan. */
7015 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7016 return expand_call (exp, target, ignore);
7018 /* When not optimizing, generate calls to library functions for a certain
7019 set of builtins. */
7020 if (!optimize
7021 && !called_as_built_in (fndecl)
7022 && fcode != BUILT_IN_FORK
7023 && fcode != BUILT_IN_EXECL
7024 && fcode != BUILT_IN_EXECV
7025 && fcode != BUILT_IN_EXECLP
7026 && fcode != BUILT_IN_EXECLE
7027 && fcode != BUILT_IN_EXECVP
7028 && fcode != BUILT_IN_EXECVE
7029 && !ALLOCA_FUNCTION_CODE_P (fcode)
7030 && fcode != BUILT_IN_FREE)
7031 return expand_call (exp, target, ignore);
7033 /* The built-in function expanders test for target == const0_rtx
7034 to determine whether the function's result will be ignored. */
7035 if (ignore)
7036 target = const0_rtx;
7038 /* If the result of a pure or const built-in function is ignored, and
7039 none of its arguments are volatile, we can avoid expanding the
7040 built-in call and just evaluate the arguments for side-effects. */
7041 if (target == const0_rtx
7042 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7043 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7045 bool volatilep = false;
7046 tree arg;
7047 call_expr_arg_iterator iter;
7049 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7050 if (TREE_THIS_VOLATILE (arg))
7052 volatilep = true;
7053 break;
7056 if (! volatilep)
7058 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7059 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7060 return const0_rtx;
7064 switch (fcode)
7066 CASE_FLT_FN (BUILT_IN_FABS):
7067 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7068 case BUILT_IN_FABSD32:
7069 case BUILT_IN_FABSD64:
7070 case BUILT_IN_FABSD128:
7071 target = expand_builtin_fabs (exp, target, subtarget);
7072 if (target)
7073 return target;
7074 break;
7076 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7077 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7078 target = expand_builtin_copysign (exp, target, subtarget);
7079 if (target)
7080 return target;
7081 break;
7083 /* Just do a normal library call if we were unable to fold
7084 the values. */
7085 CASE_FLT_FN (BUILT_IN_CABS):
7086 break;
7088 CASE_FLT_FN (BUILT_IN_FMA):
7089 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7090 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7091 if (target)
7092 return target;
7093 break;
7095 CASE_FLT_FN (BUILT_IN_ILOGB):
7096 if (! flag_unsafe_math_optimizations)
7097 break;
7098 gcc_fallthrough ();
7099 CASE_FLT_FN (BUILT_IN_ISINF):
7100 CASE_FLT_FN (BUILT_IN_FINITE):
7101 case BUILT_IN_ISFINITE:
7102 case BUILT_IN_ISNORMAL:
7103 target = expand_builtin_interclass_mathfn (exp, target);
7104 if (target)
7105 return target;
7106 break;
7108 CASE_FLT_FN (BUILT_IN_ICEIL):
7109 CASE_FLT_FN (BUILT_IN_LCEIL):
7110 CASE_FLT_FN (BUILT_IN_LLCEIL):
7111 CASE_FLT_FN (BUILT_IN_LFLOOR):
7112 CASE_FLT_FN (BUILT_IN_IFLOOR):
7113 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7114 target = expand_builtin_int_roundingfn (exp, target);
7115 if (target)
7116 return target;
7117 break;
7119 CASE_FLT_FN (BUILT_IN_IRINT):
7120 CASE_FLT_FN (BUILT_IN_LRINT):
7121 CASE_FLT_FN (BUILT_IN_LLRINT):
7122 CASE_FLT_FN (BUILT_IN_IROUND):
7123 CASE_FLT_FN (BUILT_IN_LROUND):
7124 CASE_FLT_FN (BUILT_IN_LLROUND):
7125 target = expand_builtin_int_roundingfn_2 (exp, target);
7126 if (target)
7127 return target;
7128 break;
7130 CASE_FLT_FN (BUILT_IN_POWI):
7131 target = expand_builtin_powi (exp, target);
7132 if (target)
7133 return target;
7134 break;
7136 CASE_FLT_FN (BUILT_IN_CEXPI):
7137 target = expand_builtin_cexpi (exp, target);
7138 gcc_assert (target);
7139 return target;
7141 CASE_FLT_FN (BUILT_IN_SIN):
7142 CASE_FLT_FN (BUILT_IN_COS):
7143 if (! flag_unsafe_math_optimizations)
7144 break;
7145 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7146 if (target)
7147 return target;
7148 break;
7150 CASE_FLT_FN (BUILT_IN_SINCOS):
7151 if (! flag_unsafe_math_optimizations)
7152 break;
7153 target = expand_builtin_sincos (exp);
7154 if (target)
7155 return target;
7156 break;
7158 case BUILT_IN_APPLY_ARGS:
7159 return expand_builtin_apply_args ();
7161 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7162 FUNCTION with a copy of the parameters described by
7163 ARGUMENTS, and ARGSIZE. It returns a block of memory
7164 allocated on the stack into which is stored all the registers
7165 that might possibly be used for returning the result of a
7166 function. ARGUMENTS is the value returned by
7167 __builtin_apply_args. ARGSIZE is the number of bytes of
7168 arguments that must be copied. ??? How should this value be
7169 computed? We'll also need a safe worst case value for varargs
7170 functions. */
7171 case BUILT_IN_APPLY:
7172 if (!validate_arglist (exp, POINTER_TYPE,
7173 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7174 && !validate_arglist (exp, REFERENCE_TYPE,
7175 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7176 return const0_rtx;
7177 else
7179 rtx ops[3];
7181 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7182 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7183 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7185 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7188 /* __builtin_return (RESULT) causes the function to return the
7189 value described by RESULT. RESULT is address of the block of
7190 memory returned by __builtin_apply. */
7191 case BUILT_IN_RETURN:
7192 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7193 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7194 return const0_rtx;
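
/* Editorial sketch (not part of the original source): these two builtins
   combine with __builtin_apply_args to write a generic call forwarder
   along the lines of

     void
     forward (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }

   where TARGET_FN is a placeholder for some function pointer, and 64 is
   a guessed upper bound on the argument block size; as the ??? note
   above says, there is no portable way to compute that bound.  */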
7196 case BUILT_IN_SAVEREGS:
7197 return expand_builtin_saveregs ();
7199 case BUILT_IN_VA_ARG_PACK:
7200 /* All valid uses of __builtin_va_arg_pack () are removed during
7201 inlining. */
7202 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7203 return const0_rtx;
7205 case BUILT_IN_VA_ARG_PACK_LEN:
7206 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7207 inlining. */
7208 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7209 return const0_rtx;
7211 /* Return the address of the first anonymous stack arg. */
7212 case BUILT_IN_NEXT_ARG:
7213 if (fold_builtin_next_arg (exp, false))
7214 return const0_rtx;
7215 return expand_builtin_next_arg ();
7217 case BUILT_IN_CLEAR_CACHE:
7218 target = expand_builtin___clear_cache (exp);
7219 if (target)
7220 return target;
7221 break;
7223 case BUILT_IN_CLASSIFY_TYPE:
7224 return expand_builtin_classify_type (exp);
7226 case BUILT_IN_CONSTANT_P:
7227 return const0_rtx;
7229 case BUILT_IN_FRAME_ADDRESS:
7230 case BUILT_IN_RETURN_ADDRESS:
7231 return expand_builtin_frame_address (fndecl, exp);
7233 /* Returns the address of the area where the structure is returned.
7234 0 otherwise. */
7235 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7236 if (call_expr_nargs (exp) != 0
7237 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7238 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7239 return const0_rtx;
7240 else
7241 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7243 CASE_BUILT_IN_ALLOCA:
7244 target = expand_builtin_alloca (exp);
7245 if (target)
7246 return target;
7247 break;
7249 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7250 return expand_asan_emit_allocas_unpoison (exp);
7252 case BUILT_IN_STACK_SAVE:
7253 return expand_stack_save ();
7255 case BUILT_IN_STACK_RESTORE:
7256 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7257 return const0_rtx;
7259 case BUILT_IN_BSWAP16:
7260 case BUILT_IN_BSWAP32:
7261 case BUILT_IN_BSWAP64:
7262 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7263 if (target)
7264 return target;
7265 break;
7267 CASE_INT_FN (BUILT_IN_FFS):
7268 target = expand_builtin_unop (target_mode, exp, target,
7269 subtarget, ffs_optab);
7270 if (target)
7271 return target;
7272 break;
7274 CASE_INT_FN (BUILT_IN_CLZ):
7275 target = expand_builtin_unop (target_mode, exp, target,
7276 subtarget, clz_optab);
7277 if (target)
7278 return target;
7279 break;
7281 CASE_INT_FN (BUILT_IN_CTZ):
7282 target = expand_builtin_unop (target_mode, exp, target,
7283 subtarget, ctz_optab);
7284 if (target)
7285 return target;
7286 break;
7288 CASE_INT_FN (BUILT_IN_CLRSB):
7289 target = expand_builtin_unop (target_mode, exp, target,
7290 subtarget, clrsb_optab);
7291 if (target)
7292 return target;
7293 break;
7295 CASE_INT_FN (BUILT_IN_POPCOUNT):
7296 target = expand_builtin_unop (target_mode, exp, target,
7297 subtarget, popcount_optab);
7298 if (target)
7299 return target;
7300 break;
7302 CASE_INT_FN (BUILT_IN_PARITY):
7303 target = expand_builtin_unop (target_mode, exp, target,
7304 subtarget, parity_optab);
7305 if (target)
7306 return target;
7307 break;
7309 case BUILT_IN_STRLEN:
7310 target = expand_builtin_strlen (exp, target, target_mode);
7311 if (target)
7312 return target;
7313 break;
7315 case BUILT_IN_STRNLEN:
7316 target = expand_builtin_strnlen (exp, target, target_mode);
7317 if (target)
7318 return target;
7319 break;
7321 case BUILT_IN_STRCAT:
7322 target = expand_builtin_strcat (exp, target);
7323 if (target)
7324 return target;
7325 break;
7327 case BUILT_IN_STRCPY:
7328 target = expand_builtin_strcpy (exp, target);
7329 if (target)
7330 return target;
7331 break;
7333 case BUILT_IN_STRNCAT:
7334 target = expand_builtin_strncat (exp, target);
7335 if (target)
7336 return target;
7337 break;
7339 case BUILT_IN_STRNCPY:
7340 target = expand_builtin_strncpy (exp, target);
7341 if (target)
7342 return target;
7343 break;
7345 case BUILT_IN_STPCPY:
7346 target = expand_builtin_stpcpy (exp, target, mode);
7347 if (target)
7348 return target;
7349 break;
7351 case BUILT_IN_STPNCPY:
7352 target = expand_builtin_stpncpy (exp, target);
7353 if (target)
7354 return target;
7355 break;
7357 case BUILT_IN_MEMCHR:
7358 target = expand_builtin_memchr (exp, target);
7359 if (target)
7360 return target;
7361 break;
7363 case BUILT_IN_MEMCPY:
7364 target = expand_builtin_memcpy (exp, target);
7365 if (target)
7366 return target;
7367 break;
7369 case BUILT_IN_MEMMOVE:
7370 target = expand_builtin_memmove (exp, target);
7371 if (target)
7372 return target;
7373 break;
7375 case BUILT_IN_MEMPCPY:
7376 target = expand_builtin_mempcpy (exp, target);
7377 if (target)
7378 return target;
7379 break;
7381 case BUILT_IN_MEMSET:
7382 target = expand_builtin_memset (exp, target, mode);
7383 if (target)
7384 return target;
7385 break;
7387 case BUILT_IN_BZERO:
7388 target = expand_builtin_bzero (exp);
7389 if (target)
7390 return target;
7391 break;
7393 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7394 back to a BUILT_IN_STRCMP. Remember to delete the third parameter
7395 when changing it to a strcmp call. */
7396 case BUILT_IN_STRCMP_EQ:
7397 target = expand_builtin_memcmp (exp, target, true);
7398 if (target)
7399 return target;
7401 /* Change this call back to a BUILT_IN_STRCMP. */
7402 TREE_OPERAND (exp, 1)
7403 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7405 /* Delete the last parameter. */
7406 unsigned int i;
7407 vec<tree, va_gc> *arg_vec;
7408 vec_alloc (arg_vec, 2);
7409 for (i = 0; i < 2; i++)
7410 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7411 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7412 /* FALLTHROUGH */
7414 case BUILT_IN_STRCMP:
7415 target = expand_builtin_strcmp (exp, target);
7416 if (target)
7417 return target;
7418 break;
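
/* Editorial note (not part of the original source): BUILT_IN_STRCMP_EQ
   is created by the strlen pass for boolean uses such as

     if (strcmp (s, "abc") == 0) ...

   carrying a third, length-bound argument so the comparison can be
   expanded like a memcmp equality test; the rebuild above restores a
   plain two-argument strcmp call for the library fallback.  */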
7420 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7421 back to a BUILT_IN_STRNCMP. */
7422 case BUILT_IN_STRNCMP_EQ:
7423 target = expand_builtin_memcmp (exp, target, true);
7424 if (target)
7425 return target;
7427 /* Change it back to a BUILT_IN_STRNCMP. */
7428 TREE_OPERAND (exp, 1)
7429 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7430 /* FALLTHROUGH */
7432 case BUILT_IN_STRNCMP:
7433 target = expand_builtin_strncmp (exp, target, mode);
7434 if (target)
7435 return target;
7436 break;
7438 case BUILT_IN_BCMP:
7439 case BUILT_IN_MEMCMP:
7440 case BUILT_IN_MEMCMP_EQ:
7441 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7442 if (target)
7443 return target;
7444 if (fcode == BUILT_IN_MEMCMP_EQ)
7446 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7447 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7449 break;
7451 case BUILT_IN_SETJMP:
7452 /* This should have been lowered to the builtins below. */
7453 gcc_unreachable ();
7455 case BUILT_IN_SETJMP_SETUP:
7456 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7457 and the receiver label. */
7458 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7460 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7461 VOIDmode, EXPAND_NORMAL);
7462 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7463 rtx_insn *label_r = label_rtx (label);
7465 /* This is copied from the handling of non-local gotos. */
7466 expand_builtin_setjmp_setup (buf_addr, label_r);
7467 nonlocal_goto_handler_labels
7468 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7469 nonlocal_goto_handler_labels);
7470 /* ??? Do not let expand_label treat us as such since we would
7471 not want to be both on the list of non-local labels and on
7472 the list of forced labels. */
7473 FORCED_LABEL (label) = 0;
7474 return const0_rtx;
7476 break;
7478 case BUILT_IN_SETJMP_RECEIVER:
7479 /* __builtin_setjmp_receiver is passed the receiver label. */
7480 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7482 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7483 rtx_insn *label_r = label_rtx (label);
7485 expand_builtin_setjmp_receiver (label_r);
7486 return const0_rtx;
7488 break;
7490 /* __builtin_longjmp is passed a pointer to an array of five words.
7491 It's similar to the C library longjmp function but works with
7492 __builtin_setjmp above. */
7493 case BUILT_IN_LONGJMP:
7494 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7496 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7497 VOIDmode, EXPAND_NORMAL);
7498 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7500 if (value != const1_rtx)
7502 error ("%<__builtin_longjmp%> second argument must be 1");
7503 return const0_rtx;
7506 expand_builtin_longjmp (buf_addr, value);
7507 return const0_rtx;
7509 break;
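
/* Editorial sketch (not part of the original source): the low-level
   pair is used roughly like the libc functions, except that the buffer
   is a bare five-word array and the jump value is fixed:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);    (the second argument must be 1)

   __builtin_setjmp itself never reaches this expander; as noted above,
   it is lowered earlier to the _SETUP/_RECEIVER builtins.  */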
7511 case BUILT_IN_NONLOCAL_GOTO:
7512 target = expand_builtin_nonlocal_goto (exp);
7513 if (target)
7514 return target;
7515 break;
7517 /* This updates the setjmp buffer that is its argument with the value
7518 of the current stack pointer. */
7519 case BUILT_IN_UPDATE_SETJMP_BUF:
7520 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7522 rtx buf_addr
7523 = expand_normal (CALL_EXPR_ARG (exp, 0));
7525 expand_builtin_update_setjmp_buf (buf_addr);
7526 return const0_rtx;
7528 break;
7530 case BUILT_IN_TRAP:
7531 expand_builtin_trap ();
7532 return const0_rtx;
7534 case BUILT_IN_UNREACHABLE:
7535 expand_builtin_unreachable ();
7536 return const0_rtx;
7538 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7539 case BUILT_IN_SIGNBITD32:
7540 case BUILT_IN_SIGNBITD64:
7541 case BUILT_IN_SIGNBITD128:
7542 target = expand_builtin_signbit (exp, target);
7543 if (target)
7544 return target;
7545 break;
7547 /* Various hooks for the DWARF 2 __throw routine. */
7548 case BUILT_IN_UNWIND_INIT:
7549 expand_builtin_unwind_init ();
7550 return const0_rtx;
7551 case BUILT_IN_DWARF_CFA:
7552 return virtual_cfa_rtx;
7553 #ifdef DWARF2_UNWIND_INFO
7554 case BUILT_IN_DWARF_SP_COLUMN:
7555 return expand_builtin_dwarf_sp_column ();
7556 case BUILT_IN_INIT_DWARF_REG_SIZES:
7557 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7558 return const0_rtx;
7559 #endif
7560 case BUILT_IN_FROB_RETURN_ADDR:
7561 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7562 case BUILT_IN_EXTRACT_RETURN_ADDR:
7563 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7564 case BUILT_IN_EH_RETURN:
7565 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7566 CALL_EXPR_ARG (exp, 1));
7567 return const0_rtx;
7568 case BUILT_IN_EH_RETURN_DATA_REGNO:
7569 return expand_builtin_eh_return_data_regno (exp);
7570 case BUILT_IN_EXTEND_POINTER:
7571 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7572 case BUILT_IN_EH_POINTER:
7573 return expand_builtin_eh_pointer (exp);
7574 case BUILT_IN_EH_FILTER:
7575 return expand_builtin_eh_filter (exp);
7576 case BUILT_IN_EH_COPY_VALUES:
7577 return expand_builtin_eh_copy_values (exp);
7579 case BUILT_IN_VA_START:
7580 return expand_builtin_va_start (exp);
7581 case BUILT_IN_VA_END:
7582 return expand_builtin_va_end (exp);
7583 case BUILT_IN_VA_COPY:
7584 return expand_builtin_va_copy (exp);
7585 case BUILT_IN_EXPECT:
7586 return expand_builtin_expect (exp, target);
7587 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7588 return expand_builtin_expect_with_probability (exp, target);
7589 case BUILT_IN_ASSUME_ALIGNED:
7590 return expand_builtin_assume_aligned (exp, target);
7591 case BUILT_IN_PREFETCH:
7592 expand_builtin_prefetch (exp);
7593 return const0_rtx;
7595 case BUILT_IN_INIT_TRAMPOLINE:
7596 return expand_builtin_init_trampoline (exp, true);
7597 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7598 return expand_builtin_init_trampoline (exp, false);
7599 case BUILT_IN_ADJUST_TRAMPOLINE:
7600 return expand_builtin_adjust_trampoline (exp);
7602 case BUILT_IN_INIT_DESCRIPTOR:
7603 return expand_builtin_init_descriptor (exp);
7604 case BUILT_IN_ADJUST_DESCRIPTOR:
7605 return expand_builtin_adjust_descriptor (exp);
7607 case BUILT_IN_FORK:
7608 case BUILT_IN_EXECL:
7609 case BUILT_IN_EXECV:
7610 case BUILT_IN_EXECLP:
7611 case BUILT_IN_EXECLE:
7612 case BUILT_IN_EXECVP:
7613 case BUILT_IN_EXECVE:
7614 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7615 if (target)
7616 return target;
7617 break;
7619 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7620 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7621 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7622 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7623 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7624 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7625 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7626 if (target)
7627 return target;
7628 break;
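
/* Editorial note (not part of the original source): the boolean AFTER
   argument distinguishes the two flavours; e.g. with int v = 5,

     __sync_fetch_and_add (&v, 3)   expands with after == false and
                                    returns 5 (the old value), while
     __sync_add_and_fetch (&v, 3)   expands with after == true and
                                    returns 8 (the new value).  */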
7630 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7631 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7632 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7633 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7634 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7635 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7636 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7637 if (target)
7638 return target;
7639 break;
7641 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7642 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7643 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7644 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7645 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7646 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7647 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7648 if (target)
7649 return target;
7650 break;
7652 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7653 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7654 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7655 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7656 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7657 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7658 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7659 if (target)
7660 return target;
7661 break;
7663 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7664 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7665 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7666 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7667 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7668 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7669 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7670 if (target)
7671 return target;
7672 break;
7674 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7675 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7676 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7677 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7678 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7679 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7680 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7681 if (target)
7682 return target;
7683 break;
7685 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7686 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7687 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7688 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7689 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7690 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7691 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7692 if (target)
7693 return target;
7694 break;
7696 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7697 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7698 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7699 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7700 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7701 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7702 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7703 if (target)
7704 return target;
7705 break;
7707 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7708 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7709 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7710 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7711 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7712 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7713 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7714 if (target)
7715 return target;
7716 break;
7718 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7719 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7720 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7721 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7722 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7723 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7724 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7725 if (target)
7726 return target;
7727 break;
7729 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7730 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7731 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7732 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7733 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7734 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7735 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7736 if (target)
7737 return target;
7738 break;
7740 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7741 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7742 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7743 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7744 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7745 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7746 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7747 if (target)
7748 return target;
7749 break;
7751 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7752 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7753 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7754 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7755 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7756 if (mode == VOIDmode)
7757 mode = TYPE_MODE (boolean_type_node);
7758 if (!target || !register_operand (target, mode))
7759 target = gen_reg_rtx (mode);
7761 mode = get_builtin_sync_mode
7762 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7763 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7764 if (target)
7765 return target;
7766 break;
7768 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7769 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7770 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7771 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7772 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7773 mode = get_builtin_sync_mode
7774 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7775 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7776 if (target)
7777 return target;
7778 break;
7780 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7781 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7782 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7783 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7784 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7785 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7786 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7787 if (target)
7788 return target;
7789 break;
7791 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7792 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7793 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7794 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7795 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7796 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7797 expand_builtin_sync_lock_release (mode, exp);
7798 return const0_rtx;
7800 case BUILT_IN_SYNC_SYNCHRONIZE:
7801 expand_builtin_sync_synchronize ();
7802 return const0_rtx;
7804 case BUILT_IN_ATOMIC_EXCHANGE_1:
7805 case BUILT_IN_ATOMIC_EXCHANGE_2:
7806 case BUILT_IN_ATOMIC_EXCHANGE_4:
7807 case BUILT_IN_ATOMIC_EXCHANGE_8:
7808 case BUILT_IN_ATOMIC_EXCHANGE_16:
7809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7810 target = expand_builtin_atomic_exchange (mode, exp, target);
7811 if (target)
7812 return target;
7813 break;
7815 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7816 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7817 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7818 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7819 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7821 unsigned int nargs, z;
7822 vec<tree, va_gc> *vec;
7824 mode =
7825 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7826 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7827 if (target)
7828 return target;
7830 /* If this is turned into an external library call, the weak parameter
7831 must be dropped to match the expected parameter list. */
7832 nargs = call_expr_nargs (exp);
7833 vec_alloc (vec, nargs - 1);
7834 for (z = 0; z < 3; z++)
7835 vec->quick_push (CALL_EXPR_ARG (exp, z));
7836 /* Skip the boolean weak parameter. */
7837 for (z = 4; z < 6; z++)
7838 vec->quick_push (CALL_EXPR_ARG (exp, z));
7839 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7840 break;
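
/* Editorial note (not part of the original source): the builtin takes
   six arguments,

     __atomic_compare_exchange_n (ptr, expected, desired,
                                  weak, success_order, failure_order)

   while the out-of-line libatomic routine takes five, without WEAK;
   hence the loops above copy arguments 0-2 and 4-5, skipping index 3,
   before rebuilding the call.  */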
7843 case BUILT_IN_ATOMIC_LOAD_1:
7844 case BUILT_IN_ATOMIC_LOAD_2:
7845 case BUILT_IN_ATOMIC_LOAD_4:
7846 case BUILT_IN_ATOMIC_LOAD_8:
7847 case BUILT_IN_ATOMIC_LOAD_16:
7848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7849 target = expand_builtin_atomic_load (mode, exp, target);
7850 if (target)
7851 return target;
7852 break;
7854 case BUILT_IN_ATOMIC_STORE_1:
7855 case BUILT_IN_ATOMIC_STORE_2:
7856 case BUILT_IN_ATOMIC_STORE_4:
7857 case BUILT_IN_ATOMIC_STORE_8:
7858 case BUILT_IN_ATOMIC_STORE_16:
7859 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7860 target = expand_builtin_atomic_store (mode, exp);
7861 if (target)
7862 return const0_rtx;
7863 break;
7865 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7866 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7867 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7868 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7869 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7871 enum built_in_function lib;
7872 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7873 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7874 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7875 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7876 ignore, lib);
7877 if (target)
7878 return target;
7879 break;
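
/* Editorial note (not part of the original source): the LIB fallback
   works because an *_FETCH result can be reconstructed from the
   corresponding FETCH_* one, e.g.

     __atomic_add_fetch (p, n, order)
       == __atomic_fetch_add (p, n, order) + n

   which is what passing the FETCH_ADD code with the fetch-after flag
   set to true asks the expander to emit.  */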
7881 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7882 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7883 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7884 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7885 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7887 enum built_in_function lib;
7888 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7889 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7890 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7891 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7892 ignore, lib);
7893 if (target)
7894 return target;
7895 break;
7897 case BUILT_IN_ATOMIC_AND_FETCH_1:
7898 case BUILT_IN_ATOMIC_AND_FETCH_2:
7899 case BUILT_IN_ATOMIC_AND_FETCH_4:
7900 case BUILT_IN_ATOMIC_AND_FETCH_8:
7901 case BUILT_IN_ATOMIC_AND_FETCH_16:
7903 enum built_in_function lib;
7904 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7905 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7906 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7907 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7908 ignore, lib);
7909 if (target)
7910 return target;
7911 break;
7913 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7914 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7915 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7916 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7917 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7919 enum built_in_function lib;
7920 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7921 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7922 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7923 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7924 ignore, lib);
7925 if (target)
7926 return target;
7927 break;
7929 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7930 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7931 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7932 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7933 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7935 enum built_in_function lib;
7936 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7937 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7938 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7939 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7940 ignore, lib);
7941 if (target)
7942 return target;
7943 break;
7945 case BUILT_IN_ATOMIC_OR_FETCH_1:
7946 case BUILT_IN_ATOMIC_OR_FETCH_2:
7947 case BUILT_IN_ATOMIC_OR_FETCH_4:
7948 case BUILT_IN_ATOMIC_OR_FETCH_8:
7949 case BUILT_IN_ATOMIC_OR_FETCH_16:
7951 enum built_in_function lib;
7952 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7953 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7954 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7955 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7956 ignore, lib);
7957 if (target)
7958 return target;
7959 break;
7961 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7962 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7963 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7964 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7965 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7966 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7967 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7968 ignore, BUILT_IN_NONE);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7974 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7975 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7976 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7977 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7978 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7979 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7980 ignore, BUILT_IN_NONE);
7981 if (target)
7982 return target;
7983 break;
7985 case BUILT_IN_ATOMIC_FETCH_AND_1:
7986 case BUILT_IN_ATOMIC_FETCH_AND_2:
7987 case BUILT_IN_ATOMIC_FETCH_AND_4:
7988 case BUILT_IN_ATOMIC_FETCH_AND_8:
7989 case BUILT_IN_ATOMIC_FETCH_AND_16:
7990 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7991 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7992 ignore, BUILT_IN_NONE);
7993 if (target)
7994 return target;
7995 break;
7997 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7998 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7999 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8000 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8001 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8002 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8003 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8004 ignore, BUILT_IN_NONE);
8005 if (target)
8006 return target;
8007 break;
8009 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8010 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8011 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8012 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8013 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8014 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8015 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8016 ignore, BUILT_IN_NONE);
8017 if (target)
8018 return target;
8019 break;
8021 case BUILT_IN_ATOMIC_FETCH_OR_1:
8022 case BUILT_IN_ATOMIC_FETCH_OR_2:
8023 case BUILT_IN_ATOMIC_FETCH_OR_4:
8024 case BUILT_IN_ATOMIC_FETCH_OR_8:
8025 case BUILT_IN_ATOMIC_FETCH_OR_16:
8026 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8027 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8028 ignore, BUILT_IN_NONE);
8029 if (target)
8030 return target;
8031 break;
8033 case BUILT_IN_ATOMIC_TEST_AND_SET:
8034 return expand_builtin_atomic_test_and_set (exp, target);
8036 case BUILT_IN_ATOMIC_CLEAR:
8037 return expand_builtin_atomic_clear (exp);
8039 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8040 return expand_builtin_atomic_always_lock_free (exp);
8042 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8043 target = expand_builtin_atomic_is_lock_free (exp);
8044 if (target)
8045 return target;
8046 break;
8048 case BUILT_IN_ATOMIC_THREAD_FENCE:
8049 expand_builtin_atomic_thread_fence (exp);
8050 return const0_rtx;
8052 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8053 expand_builtin_atomic_signal_fence (exp);
8054 return const0_rtx;
8056 case BUILT_IN_OBJECT_SIZE:
8057 return expand_builtin_object_size (exp);
8059 case BUILT_IN_MEMCPY_CHK:
8060 case BUILT_IN_MEMPCPY_CHK:
8061 case BUILT_IN_MEMMOVE_CHK:
8062 case BUILT_IN_MEMSET_CHK:
8063 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8064 if (target)
8065 return target;
8066 break;
8068 case BUILT_IN_STRCPY_CHK:
8069 case BUILT_IN_STPCPY_CHK:
8070 case BUILT_IN_STRNCPY_CHK:
8071 case BUILT_IN_STPNCPY_CHK:
8072 case BUILT_IN_STRCAT_CHK:
8073 case BUILT_IN_STRNCAT_CHK:
8074 case BUILT_IN_SNPRINTF_CHK:
8075 case BUILT_IN_VSNPRINTF_CHK:
8076 maybe_emit_chk_warning (exp, fcode);
8077 break;
8079 case BUILT_IN_SPRINTF_CHK:
8080 case BUILT_IN_VSPRINTF_CHK:
8081 maybe_emit_sprintf_chk_warning (exp, fcode);
8082 break;
8084 case BUILT_IN_FREE:
8085 if (warn_free_nonheap_object)
8086 maybe_emit_free_warning (exp);
8087 break;
8089 case BUILT_IN_THREAD_POINTER:
8090 return expand_builtin_thread_pointer (exp, target);
8092 case BUILT_IN_SET_THREAD_POINTER:
8093 expand_builtin_set_thread_pointer (exp);
8094 return const0_rtx;
8096 case BUILT_IN_ACC_ON_DEVICE:
8097 /* Do a library call if we failed to expand the builtin when
8098 folding. */
8099 break;
8101 case BUILT_IN_GOACC_PARLEVEL_ID:
8102 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8103 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8105 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8106 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8108 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8109 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8110 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8111 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8112 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8113 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8114 return expand_speculation_safe_value (mode, exp, target, ignore);
8116 default: /* Just do a library call if this is an unknown builtin. */
8117 break;
8120 /* The switch statement above can drop through to cause the function
8121 to be called normally. */
8122 return expand_call (exp, target, ignore);
8125 /* Determine whether a tree node represents a call to a built-in
8126 function. If the tree T is a call to a built-in function with
8127 the right number of arguments of the appropriate types, return
8128 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8129 Otherwise the return value is END_BUILTINS. */
8131 enum built_in_function
8132 builtin_mathfn_code (const_tree t)
8134 const_tree fndecl, arg, parmlist;
8135 const_tree argtype, parmtype;
8136 const_call_expr_arg_iterator iter;
8138 if (TREE_CODE (t) != CALL_EXPR)
8139 return END_BUILTINS;
8141 fndecl = get_callee_fndecl (t);
8142 if (fndecl == NULL_TREE
8143 || TREE_CODE (fndecl) != FUNCTION_DECL
8144 || ! DECL_BUILT_IN (fndecl)
8145 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8146 return END_BUILTINS;
8148 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8149 init_const_call_expr_arg_iterator (t, &iter);
8150 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8152 /* If a function doesn't take a variable number of arguments,
8153 the last element in the list will have type `void'. */
8154 parmtype = TREE_VALUE (parmlist);
8155 if (VOID_TYPE_P (parmtype))
8157 if (more_const_call_expr_args_p (&iter))
8158 return END_BUILTINS;
8159 return DECL_FUNCTION_CODE (fndecl);
8162 if (! more_const_call_expr_args_p (&iter))
8163 return END_BUILTINS;
8165 arg = next_const_call_expr_arg (&iter);
8166 argtype = TREE_TYPE (arg);
8168 if (SCALAR_FLOAT_TYPE_P (parmtype))
8170 if (! SCALAR_FLOAT_TYPE_P (argtype))
8171 return END_BUILTINS;
8173 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8175 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8176 return END_BUILTINS;
8178 else if (POINTER_TYPE_P (parmtype))
8180 if (! POINTER_TYPE_P (argtype))
8181 return END_BUILTINS;
8183 else if (INTEGRAL_TYPE_P (parmtype))
8185 if (! INTEGRAL_TYPE_P (argtype))
8186 return END_BUILTINS;
8188 else
8189 return END_BUILTINS;
8192 /* Variable-length argument list. */
8193 return DECL_FUNCTION_CODE (fndecl);
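
/* Editorial sketch (not part of the original source): given a CALL_EXPR
   T for the C expression sqrt (x) with double x, one would expect

     builtin_mathfn_code (t) == BUILT_IN_SQRT

   whereas a call whose argument types do not match the builtin's
   prototype (say, a pointer passed where a double is expected) yields
   END_BUILTINS.  */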
8196 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8197 evaluate to a constant. */
8199 static tree
8200 fold_builtin_constant_p (tree arg)
8202 /* We return 1 for a numeric type that's known to be a constant
8203 value at compile-time or for an aggregate type that's a
8204 literal constant. */
8205 STRIP_NOPS (arg);
8207 /* If we know this is a constant, return the constant one. */
8208 if (CONSTANT_CLASS_P (arg)
8209 || (TREE_CODE (arg) == CONSTRUCTOR
8210 && TREE_CONSTANT (arg)))
8211 return integer_one_node;
8212 if (TREE_CODE (arg) == ADDR_EXPR)
8214 tree op = TREE_OPERAND (arg, 0);
8215 if (TREE_CODE (op) == STRING_CST
8216 || (TREE_CODE (op) == ARRAY_REF
8217 && integer_zerop (TREE_OPERAND (op, 1))
8218 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8219 return integer_one_node;
8222 /* If this expression has side effects, show we don't know it to be a
8223 constant. Likewise if it's a pointer or aggregate type, since in
8224 those cases we only want literals, as those are only optimized
8225 when generating RTL, not later.
8226 And finally, if we are compiling an initializer, not code, we
8227 need to return a definite result now; there's not going to be any
8228 more optimization done. */
8229 if (TREE_SIDE_EFFECTS (arg)
8230 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8231 || POINTER_TYPE_P (TREE_TYPE (arg))
8232 || cfun == 0
8233 || folding_initializer
8234 || force_folding_builtin_constant_p)
8235 return integer_zero_node;
8237 return NULL_TREE;
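
/* Editorial note (not part of the original source): some concrete
   outcomes of the logic above:

     __builtin_constant_p (42)      -> integer_one_node
     __builtin_constant_p ("abc")   -> integer_one_node (ADDR_EXPR of
                                       a STRING_CST)
     __builtin_constant_p (x++)     -> integer_zero_node (side effects)
     __builtin_constant_p (x)       -> NULL_TREE, i.e. deferred, unless
                                       we are folding an initializer.  */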
8240 /* Create builtin_expect or builtin_expect_with_probability
8241 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8242 The Fortran front end can also produce builtin_expect with PREDICTOR
8243 as its third argument; builtin_expect_with_probability instead uses
8244 the third argument as a PROBABILITY value. */
8246 static tree
8247 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8248 tree predictor, tree probability)
8250 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8252 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8253 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8254 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8255 ret_type = TREE_TYPE (TREE_TYPE (fn));
8256 pred_type = TREE_VALUE (arg_types);
8257 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8259 pred = fold_convert_loc (loc, pred_type, pred);
8260 expected = fold_convert_loc (loc, expected_type, expected);
8262 if (probability)
8263 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8264 else
8265 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8266 predictor);
8268 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8269 build_int_cst (ret_type, 0));
8272 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8273 NULL_TREE if no simplification is possible. */
8275 tree
8276 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8277 tree arg3)
8279 tree inner, fndecl, inner_arg0;
8280 enum tree_code code;
8282 /* Distribute the expected value over short-circuiting operators.
8283 See through the cast from truthvalue_type_node to long. */
8284 inner_arg0 = arg0;
8285 while (CONVERT_EXPR_P (inner_arg0)
8286 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8287 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8288 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8290 /* If this is a builtin_expect within a builtin_expect keep the
8291 inner one. See through a comparison against a constant. It
8292 might have been added to create a truthvalue. */
8293 inner = inner_arg0;
8295 if (COMPARISON_CLASS_P (inner)
8296 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8297 inner = TREE_OPERAND (inner, 0);
8299 if (TREE_CODE (inner) == CALL_EXPR
8300 && (fndecl = get_callee_fndecl (inner))
8301 && (DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL, BUILT_IN_EXPECT)
8302 || DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL,
8303 BUILT_IN_EXPECT_WITH_PROBABILITY)))
8304 return arg0;
8306 inner = inner_arg0;
8307 code = TREE_CODE (inner);
8308 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8310 tree op0 = TREE_OPERAND (inner, 0);
8311 tree op1 = TREE_OPERAND (inner, 1);
8312 arg1 = save_expr (arg1);
8314 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8315 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8316 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8318 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
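
/* Editorial note (not part of the original source): the distribution
   above turns, e.g.,

     __builtin_expect (a && b, 1)

   into

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each branch of the short-circuit gets its own prediction.  */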
8321 /* If the argument isn't invariant then there's nothing else we can do. */
8322 if (!TREE_CONSTANT (inner_arg0))
8323 return NULL_TREE;
8325 /* If we expect that a comparison against the argument will fold to
8326 a constant return the constant. In practice, this means a true
8327 constant or the address of a non-weak symbol. */
8328 inner = inner_arg0;
8329 STRIP_NOPS (inner);
8330 if (TREE_CODE (inner) == ADDR_EXPR)
8334 inner = TREE_OPERAND (inner, 0);
8336 while (TREE_CODE (inner) == COMPONENT_REF
8337 || TREE_CODE (inner) == ARRAY_REF);
8338 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8339 return NULL_TREE;
8342 /* Otherwise, ARG0 already has the proper type for the return value. */
8343 return arg0;
8346 /* Fold a call to __builtin_classify_type with argument ARG. */
8348 static tree
8349 fold_builtin_classify_type (tree arg)
8351 if (arg == 0)
8352 return build_int_cst (integer_type_node, no_type_class);
8354 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8357 /* Fold a call to __builtin_strlen with argument ARG. */
8359 static tree
8360 fold_builtin_strlen (location_t loc, tree type, tree arg)
8362 if (!validate_arg (arg, POINTER_TYPE))
8363 return NULL_TREE;
8364 else
8366 tree len = c_strlen (arg, 0);
8368 if (len)
8369 return fold_convert_loc (loc, type, len);
8371 return NULL_TREE;
8375 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8377 static tree
8378 fold_builtin_inf (location_t loc, tree type, int warn)
8380 REAL_VALUE_TYPE real;
8382 /* __builtin_inff is intended to be usable to define INFINITY on all
8383 targets. If an infinity is not available, INFINITY expands "to a
8384 positive constant of type float that overflows at translation
8385 time", footnote "In this case, using INFINITY will violate the
8386 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8387 Thus we pedwarn to ensure this constraint violation is
8388 diagnosed. */
8389 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8390 pedwarn (loc, 0, "target format does not support infinity");
8392 real_inf (&real);
8393 return build_real (type, real);
8396 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8397 NULL_TREE if no simplification can be made. */
8399 static tree
8400 fold_builtin_sincos (location_t loc,
8401 tree arg0, tree arg1, tree arg2)
8403 tree type;
8404 tree fndecl, call = NULL_TREE;
8406 if (!validate_arg (arg0, REAL_TYPE)
8407 || !validate_arg (arg1, POINTER_TYPE)
8408 || !validate_arg (arg2, POINTER_TYPE))
8409 return NULL_TREE;
8411 type = TREE_TYPE (arg0);
8413 /* Calculate the result when the argument is a constant. */
8414 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8415 if (fn == END_BUILTINS)
8416 return NULL_TREE;
8418 /* Canonicalize sincos to cexpi. */
8419 if (TREE_CODE (arg0) == REAL_CST)
8421 tree complex_type = build_complex_type (type);
8422 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8424 if (!call)
8426 if (!targetm.libc_has_function (function_c99_math_complex)
8427 || !builtin_decl_implicit_p (fn))
8428 return NULL_TREE;
8429 fndecl = builtin_decl_explicit (fn);
8430 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8431 call = builtin_save_expr (call);
8434 tree ptype = build_pointer_type (type);
8435 arg1 = fold_convert (ptype, arg1);
8436 arg2 = fold_convert (ptype, arg2);
8437 return build2 (COMPOUND_EXPR, void_type_node,
8438 build2 (MODIFY_EXPR, void_type_node,
8439 build_fold_indirect_ref_loc (loc, arg1),
8440 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8441 build2 (MODIFY_EXPR, void_type_node,
8442 build_fold_indirect_ref_loc (loc, arg2),
8443 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8446 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8447 Return NULL_TREE if no simplification can be made. */
8449 static tree
8450 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8452 if (!validate_arg (arg1, POINTER_TYPE)
8453 || !validate_arg (arg2, POINTER_TYPE)
8454 || !validate_arg (len, INTEGER_TYPE))
8455 return NULL_TREE;
8457 /* If the LEN parameter is zero, return zero. */
8458 if (integer_zerop (len))
8459 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8460 arg1, arg2);
8462 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8463 if (operand_equal_p (arg1, arg2, 0))
8464 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8466 /* If the LEN parameter is one, return an expression corresponding to
8467 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8468 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8470 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8471 tree cst_uchar_ptr_node
8472 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8474 tree ind1
8475 = fold_convert_loc (loc, integer_type_node,
8476 build1 (INDIRECT_REF, cst_uchar_node,
8477 fold_convert_loc (loc,
8478 cst_uchar_ptr_node,
8479 arg1)));
8480 tree ind2
8481 = fold_convert_loc (loc, integer_type_node,
8482 build1 (INDIRECT_REF, cst_uchar_node,
8483 fold_convert_loc (loc,
8484 cst_uchar_ptr_node,
8485 arg2)));
8486 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8489 return NULL_TREE;
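
/* Editorial note (not part of the original source): the LEN == 1 case
   above folds, e.g.,

     memcmp (p, q, 1)

   into the byte difference

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   matching the C library's unsigned-char comparison semantics.  */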
8492 /* Fold a call to builtin isascii with argument ARG. */
8494 static tree
8495 fold_builtin_isascii (location_t loc, tree arg)
8497 if (!validate_arg (arg, INTEGER_TYPE))
8498 return NULL_TREE;
8499 else
8501 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8502 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8503 build_int_cst (integer_type_node,
8504 ~ (unsigned HOST_WIDE_INT) 0x7f));
8505 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8506 arg, integer_zero_node);
8510 /* Fold a call to builtin toascii with argument ARG. */
8512 static tree
8513 fold_builtin_toascii (location_t loc, tree arg)
8515 if (!validate_arg (arg, INTEGER_TYPE))
8516 return NULL_TREE;
8518 /* Transform toascii(c) -> (c & 0x7f). */
8519 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8520 build_int_cst (integer_type_node, 0x7f));
8523 /* Fold a call to builtin isdigit with argument ARG. */
8525 static tree
8526 fold_builtin_isdigit (location_t loc, tree arg)
8528 if (!validate_arg (arg, INTEGER_TYPE))
8529 return NULL_TREE;
8530 else
8532 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8533 /* According to the C standard, isdigit is unaffected by locale.
8534 However, it definitely is affected by the target character set. */
8535 unsigned HOST_WIDE_INT target_digit0
8536 = lang_hooks.to_target_charset ('0');
8538 if (target_digit0 == 0)
8539 return NULL_TREE;
8541 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8542 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8543 build_int_cst (unsigned_type_node, target_digit0));
8544 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8545 build_int_cst (unsigned_type_node, 9));
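
/* Editorial note (not part of the original source): for an ASCII target
   the fold above rewrites isdigit (c) as

     (unsigned) c - '0' <= 9

   e.g. c == '7' gives 55u - 48u == 7u <= 9 (true), while c == 'a'
   gives 97u - 48u == 49u (false), a single unsigned comparison in place
   of the two-sided range check.  */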
8549 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8551 static tree
8552 fold_builtin_fabs (location_t loc, tree arg, tree type)
8554 if (!validate_arg (arg, REAL_TYPE))
8555 return NULL_TREE;
8557 arg = fold_convert_loc (loc, type, arg);
8558 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8561 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8563 static tree
8564 fold_builtin_abs (location_t loc, tree arg, tree type)
8566 if (!validate_arg (arg, INTEGER_TYPE))
8567 return NULL_TREE;
8569 arg = fold_convert_loc (loc, type, arg);
8570 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8573 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8575 static tree
8576 fold_builtin_carg (location_t loc, tree arg, tree type)
8578 if (validate_arg (arg, COMPLEX_TYPE)
8579 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8581 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8583 if (atan2_fn)
8585 tree new_arg = builtin_save_expr (arg);
8586 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8587 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8588 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8592 return NULL_TREE;
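
/* Editorial note (not part of the original source): the fold above
   implements the identity carg (x) == atan2 (cimag (x), creal (x));
   the builtin_save_expr call guards against evaluating a side-effecting
   argument twice when taking its real and imaginary parts.  */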
8595 /* Fold a call to builtin frexp, we can assume the base is 2. */
8597 static tree
8598 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8600 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8601 return NULL_TREE;
8603 STRIP_NOPS (arg0);
8605 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8606 return NULL_TREE;
8608 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8610 /* Proceed if a valid pointer type was passed in. */
8611 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8613 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8614 tree frac, exp;
8616 switch (value->cl)
8618 case rvc_zero:
8619 /* For +-0, return (*exp = 0, +-0). */
8620 exp = integer_zero_node;
8621 frac = arg0;
8622 break;
8623 case rvc_nan:
8624 case rvc_inf:
8625 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8626 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8627 case rvc_normal:
8629 /* Since the frexp function always expects base 2, and in
8630 GCC normalized significands are already in the range
8631 [0.5, 1.0), we have exactly what frexp wants. */
8632 REAL_VALUE_TYPE frac_rvt = *value;
8633 SET_REAL_EXP (&frac_rvt, 0);
8634 frac = build_real (rettype, frac_rvt);
8635 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8637 break;
8638 default:
8639 gcc_unreachable ();
8642 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8643 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8644 TREE_SIDE_EFFECTS (arg1) = 1;
8645 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8648 return NULL_TREE;
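
/* Editorial note (not part of the original source): a worked constant
   case for the rvc_normal branch above: frexp (8.0, &e) folds to the
   pair (*e = 4, 0.5), since 8.0 == 0.5 * 2**4 and GCC keeps normalized
   significands in [0.5, 1.0).  */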
8651 /* Fold a call to builtin modf. */
8653 static tree
8654 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8656 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8657 return NULL_TREE;
8659 STRIP_NOPS (arg0);
8661 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8662 return NULL_TREE;
8664 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8666 /* Proceed if a valid pointer type was passed in. */
8667 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8669 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8670 REAL_VALUE_TYPE trunc, frac;
8672 switch (value->cl)
8674 case rvc_nan:
8675 case rvc_zero:
8676 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8677 trunc = frac = *value;
8678 break;
8679 case rvc_inf:
8680 /* For +-Inf, return (*arg1 = arg0, +-0). */
8681 frac = dconst0;
8682 frac.sign = value->sign;
8683 trunc = *value;
8684 break;
8685 case rvc_normal:
8686 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8687 real_trunc (&trunc, VOIDmode, value);
8688 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8689 /* If the original number was negative and already
8690 integral, then the fractional part is -0.0. */
8691 if (value->sign && frac.cl == rvc_zero)
8692 frac.sign = value->sign;
8693 break;
8696 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8697 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8698 build_real (rettype, trunc));
8699 TREE_SIDE_EFFECTS (arg1) = 1;
8700 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8701 build_real (rettype, frac));
8704 return NULL_TREE;
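
/* Editorial note (not part of the original source): constant cases of
   the fold above include

     modf (2.5, &i)    ->  (*i = 2.0, 0.5)
     modf (-3.0, &i)   ->  (*i = -3.0, -0.0)

   where the second line shows the negative-and-integral rule that
   forces the sign onto the zero fraction.  */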
8707 /* Given a location LOC, an interclass builtin function decl FNDECL
8708 and its single argument ARG, return a folded expression computing
8709 the same, or NULL_TREE if we either couldn't or didn't want to fold
8710 (the latter happens if there's an RTL instruction available). */
8712 static tree
8713 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8715 machine_mode mode;
8717 if (!validate_arg (arg, REAL_TYPE))
8718 return NULL_TREE;
8720 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8721 return NULL_TREE;
8723 mode = TYPE_MODE (TREE_TYPE (arg));
8725 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8727 /* If there is no optab, try generic code. */
8728 switch (DECL_FUNCTION_CODE (fndecl))
8730 tree result;
8732 CASE_FLT_FN (BUILT_IN_ISINF):
8734 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8735 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8736 tree type = TREE_TYPE (arg);
8737 REAL_VALUE_TYPE r;
8738 char buf[128];
8740 if (is_ibm_extended)
8742 /* NaN and Inf are encoded in the high-order double value
8743 only. The low-order value is not significant. */
8744 type = double_type_node;
8745 mode = DFmode;
8746 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8748 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8749 real_from_string (&r, buf);
8750 result = build_call_expr (isgr_fn, 2,
8751 fold_build1_loc (loc, ABS_EXPR, type, arg),
8752 build_real (type, r));
8753 return result;
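
/* Editorial note (not part of the original source): for IEEE double the
   fold above amounts to rewriting isinf (x) as

     isgreater (fabs (x), DBL_MAX)

   which is true only for +-Inf: a NaN argument makes the comparison
   unordered, and isgreater is defined to be false in that case.  */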
8755 CASE_FLT_FN (BUILT_IN_FINITE):
8756 case BUILT_IN_ISFINITE:
8758 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8759 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8760 tree type = TREE_TYPE (arg);
8761 REAL_VALUE_TYPE r;
8762 char buf[128];
8764 if (is_ibm_extended)
8766 /* NaN and Inf are encoded in the high-order double value
8767 only. The low-order value is not significant. */
8768 type = double_type_node;
8769 mode = DFmode;
8770 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8772 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8773 real_from_string (&r, buf);
8774 result = build_call_expr (isle_fn, 2,
8775 fold_build1_loc (loc, ABS_EXPR, type, arg),
8776 build_real (type, r));
8777 /*result = fold_build2_loc (loc, UNGT_EXPR,
8778 TREE_TYPE (TREE_TYPE (fndecl)),
8779 fold_build1_loc (loc, ABS_EXPR, type, arg),
8780 build_real (type, r));
8781 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8782 TREE_TYPE (TREE_TYPE (fndecl)),
8783 result);*/
8784 return result;
8786 case BUILT_IN_ISNORMAL:
8788 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8789 islessequal(fabs(x),DBL_MAX). */
8790 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8791 tree type = TREE_TYPE (arg);
8792 tree orig_arg, max_exp, min_exp;
8793 machine_mode orig_mode = mode;
8794 REAL_VALUE_TYPE rmax, rmin;
8795 char buf[128];
8797 orig_arg = arg = builtin_save_expr (arg);
8798 if (is_ibm_extended)
8800 /* Use double to test the normal range of IBM extended
8801 precision. Emin for IBM extended precision is
8802 different to emin for IEEE double, being 53 higher
8803 since the low double exponent is at least 53 lower
8804 than the high double exponent. */
8805 type = double_type_node;
8806 mode = DFmode;
8807 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8809 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8811 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8812 real_from_string (&rmax, buf);
8813 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8814 real_from_string (&rmin, buf);
8815 max_exp = build_real (type, rmax);
8816 min_exp = build_real (type, rmin);
8818 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8819 if (is_ibm_extended)
8821 /* Testing the high end of the range is done just using
8822 the high double, using the same test as isfinite().
8823 For the subnormal end of the range we first test the
8824 high double, then if its magnitude is equal to the
8825 limit of 0x1p-969, we test whether the low double is
8826 non-zero and opposite sign to the high double. */
8827 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8828 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8829 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8830 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8831 arg, min_exp);
8832 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8833 complex_double_type_node, orig_arg);
8834 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8835 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8836 tree zero = build_real (type, dconst0);
8837 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8838 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8839 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8840 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8841 fold_build3 (COND_EXPR,
8842 integer_type_node,
8843 hilt, logt, lolt));
8844 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8845 eq_min, ok_lo);
8846 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8847 gt_min, eq_min);
8849 else
8851 tree const isge_fn
8852 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8853 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8855 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8856 max_exp, min_exp);
8857 return result;
8859 default:
8860 break;
8863 return NULL_TREE;
8866 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
8867 ARG is the argument for the call. */
8869 static tree
8870 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8872 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8874 if (!validate_arg (arg, REAL_TYPE))
8875 return NULL_TREE;
8877 switch (builtin_index)
8879 case BUILT_IN_ISINF:
8880 if (!HONOR_INFINITIES (arg))
8881 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8883 return NULL_TREE;
8885 case BUILT_IN_ISINF_SIGN:
8887 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8888 /* In a boolean context, GCC will fold the inner COND_EXPR to
8889 1. So e.g. "if (isinf_sign(x))" would be folded to just
8890 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8891 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8892 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8893 tree tmp = NULL_TREE;
8895 arg = builtin_save_expr (arg);
8897 if (signbit_fn && isinf_fn)
8899 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8900 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8902 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8903 signbit_call, integer_zero_node);
8904 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8905 isinf_call, integer_zero_node);
8907 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8908 integer_minus_one_node, integer_one_node);
8909 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8910 isinf_call, tmp,
8911 integer_zero_node);
8914 return tmp;
8917 case BUILT_IN_ISFINITE:
8918 if (!HONOR_NANS (arg)
8919 && !HONOR_INFINITIES (arg))
8920 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8922 return NULL_TREE;
8924 case BUILT_IN_ISNAN:
8925 if (!HONOR_NANS (arg))
8926 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8929 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8930 if (is_ibm_extended)
8932 /* NaN and Inf are encoded in the high-order double value
8933 only. The low-order value is not significant. */
8934 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8937 arg = builtin_save_expr (arg);
8938 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8940 default:
8941 gcc_unreachable ();
8945 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8946 This builtin will generate code to return the appropriate floating
8947 point classification depending on the value of the floating point
8948 number passed in. The possible return values must be supplied as
8949 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8950 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
8951 one floating point argument which is "type generic". */
8953 static tree
8954 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8956 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8957 arg, type, res, tmp;
8958 machine_mode mode;
8959 REAL_VALUE_TYPE r;
8960 char buf[128];
8962 /* Verify the required arguments in the original call. */
8963 if (nargs != 6
8964 || !validate_arg (args[0], INTEGER_TYPE)
8965 || !validate_arg (args[1], INTEGER_TYPE)
8966 || !validate_arg (args[2], INTEGER_TYPE)
8967 || !validate_arg (args[3], INTEGER_TYPE)
8968 || !validate_arg (args[4], INTEGER_TYPE)
8969 || !validate_arg (args[5], REAL_TYPE))
8970 return NULL_TREE;
8972 fp_nan = args[0];
8973 fp_infinite = args[1];
8974 fp_normal = args[2];
8975 fp_subnormal = args[3];
8976 fp_zero = args[4];
8977 arg = args[5];
8978 type = TREE_TYPE (arg);
8979 mode = TYPE_MODE (type);
8980 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8982 /* fpclassify(x) ->
8983 isnan(x) ? FP_NAN :
8984 (fabs(x) == Inf ? FP_INFINITE :
8985 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8986 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
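/* For reference, a typical <math.h> reaches this code via a macro
   such as (illustrative, not part of this file):

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, (x))

   so the C library's values for the FP_* macros are supplied by the
   caller and need not be known here.  */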
8988 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8989 build_real (type, dconst0));
8990 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8991 tmp, fp_zero, fp_subnormal);
8993 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8994 real_from_string (&r, buf);
8995 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8996 arg, build_real (type, r));
8997 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8999 if (HONOR_INFINITIES (mode))
9001 real_inf (&r);
9002 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9003 build_real (type, r));
9004 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9005 fp_infinite, res);
9008 if (HONOR_NANS (mode))
9010 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9011 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9014 return res;
9017 /* Fold a call to an unordered comparison function such as
9018 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9019 being called and ARG0 and ARG1 are the arguments for the call.
9020 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9021 the opposite of the desired result. UNORDERED_CODE is used
9022 for modes that can hold NaNs and ORDERED_CODE is used for
9023 the rest. */
9025 static tree
9026 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9027 enum tree_code unordered_code,
9028 enum tree_code ordered_code)
9030 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9031 enum tree_code code;
9032 tree type0, type1;
9033 enum tree_code code0, code1;
9034 tree cmp_type = NULL_TREE;
9036 type0 = TREE_TYPE (arg0);
9037 type1 = TREE_TYPE (arg1);
9039 code0 = TREE_CODE (type0);
9040 code1 = TREE_CODE (type1);
9042 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9043 /* Choose the wider of two real types. */
9044 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9045 ? type0 : type1;
9046 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9047 cmp_type = type0;
9048 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9049 cmp_type = type1;
9051 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9052 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9054 if (unordered_code == UNORDERED_EXPR)
9056 if (!HONOR_NANS (arg0))
9057 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9058 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9061 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9062 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9063 fold_build2_loc (loc, code, type, arg0, arg1));
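/* Example folds performed by the function above (illustrative):

     isgreater (x, y)   =>  !(x UNLE y)   when NaNs are honored
     isgreater (x, y)   =>  !(x <= y)     with -ffinite-math-only
     isunordered (x, y) =>  0             with -ffinite-math-only,
                                          both operands still evaluated.  */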
9066 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9067 arithmetics if it can never overflow, or into internal functions that
9068 return both result of arithmetics and overflowed boolean flag in
9069 a complex integer result, or some other check for overflow.
9070 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9071 checking part of that. */
9073 static tree
9074 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9075 tree arg0, tree arg1, tree arg2)
9077 enum internal_fn ifn = IFN_LAST;
9078 /* The code of the expression corresponding to the type-generic
9079 built-in, or ERROR_MARK for the type-specific ones. */
9080 enum tree_code opcode = ERROR_MARK;
9081 bool ovf_only = false;
9083 switch (fcode)
9085 case BUILT_IN_ADD_OVERFLOW_P:
9086 ovf_only = true;
9087 /* FALLTHRU */
9088 case BUILT_IN_ADD_OVERFLOW:
9089 opcode = PLUS_EXPR;
9090 /* FALLTHRU */
9091 case BUILT_IN_SADD_OVERFLOW:
9092 case BUILT_IN_SADDL_OVERFLOW:
9093 case BUILT_IN_SADDLL_OVERFLOW:
9094 case BUILT_IN_UADD_OVERFLOW:
9095 case BUILT_IN_UADDL_OVERFLOW:
9096 case BUILT_IN_UADDLL_OVERFLOW:
9097 ifn = IFN_ADD_OVERFLOW;
9098 break;
9099 case BUILT_IN_SUB_OVERFLOW_P:
9100 ovf_only = true;
9101 /* FALLTHRU */
9102 case BUILT_IN_SUB_OVERFLOW:
9103 opcode = MINUS_EXPR;
9104 /* FALLTHRU */
9105 case BUILT_IN_SSUB_OVERFLOW:
9106 case BUILT_IN_SSUBL_OVERFLOW:
9107 case BUILT_IN_SSUBLL_OVERFLOW:
9108 case BUILT_IN_USUB_OVERFLOW:
9109 case BUILT_IN_USUBL_OVERFLOW:
9110 case BUILT_IN_USUBLL_OVERFLOW:
9111 ifn = IFN_SUB_OVERFLOW;
9112 break;
9113 case BUILT_IN_MUL_OVERFLOW_P:
9114 ovf_only = true;
9115 /* FALLTHRU */
9116 case BUILT_IN_MUL_OVERFLOW:
9117 opcode = MULT_EXPR;
9118 /* FALLTHRU */
9119 case BUILT_IN_SMUL_OVERFLOW:
9120 case BUILT_IN_SMULL_OVERFLOW:
9121 case BUILT_IN_SMULLL_OVERFLOW:
9122 case BUILT_IN_UMUL_OVERFLOW:
9123 case BUILT_IN_UMULL_OVERFLOW:
9124 case BUILT_IN_UMULLL_OVERFLOW:
9125 ifn = IFN_MUL_OVERFLOW;
9126 break;
9127 default:
9128 gcc_unreachable ();
9131 /* For the "generic" overloads, the first two arguments can have different
9132 types and the last argument determines the target type to use to check
9133 for overflow. The arguments of the other overloads all have the same
9134 type. */
9135 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9137 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9138 arguments are constant, attempt to fold the built-in call into a constant
9139 expression indicating whether or not it detected an overflow. */
9140 if (ovf_only
9141 && TREE_CODE (arg0) == INTEGER_CST
9142 && TREE_CODE (arg1) == INTEGER_CST)
9143 /* Perform the computation in the target type and check for overflow. */
9144 return omit_one_operand_loc (loc, boolean_type_node,
9145 arith_overflowed_p (opcode, type, arg0, arg1)
9146 ? boolean_true_node : boolean_false_node,
9147 arg2);
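  /* For instance (illustrative), __builtin_add_overflow_p (INT_MAX, 1,
     (int) 0) is folded here to a compile-time true, since the addition
     overflows the type of the third argument.  */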
9149 tree ctype = build_complex_type (type);
9150 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9151 2, arg0, arg1);
9152 tree tgt = save_expr (call);
9153 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9154 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9155 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9157 if (ovf_only)
9158 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9160 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9161 tree store
9162 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9163 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
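/* Illustrative shape of the result built above (a sketch, not GIMPLE
   dump syntax):

     __builtin_add_overflow (a, b, &r)
       =>  _c = .ADD_OVERFLOW (a, b);     // complex integer temporary
           *&r = REALPART_EXPR <_c>,
           (_Bool) IMAGPART_EXPR <_c>;

   i.e. the arithmetic result and the overflow flag travel together in
   one internal-function call.  */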
9166 /* Fold a call to __builtin_FILE to a constant string. */
9168 static inline tree
9169 fold_builtin_FILE (location_t loc)
9171 if (const char *fname = LOCATION_FILE (loc))
9173 /* The documentation says this builtin is equivalent to the preprocessor
9174 __FILE__ macro so it appears appropriate to use the same file prefix
9175 mappings. */
9176 fname = remap_macro_filename (fname);
9177 return build_string_literal (strlen (fname) + 1, fname);
9180 return build_string_literal (1, "");
9183 /* Fold a call to __builtin_FUNCTION to a constant string. */
9185 static inline tree
9186 fold_builtin_FUNCTION ()
9188 const char *name = "";
9190 if (current_function_decl)
9191 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9193 return build_string_literal (strlen (name) + 1, name);
9196 /* Fold a call to __builtin_LINE to an integer constant. */
9198 static inline tree
9199 fold_builtin_LINE (location_t loc, tree type)
9201 return build_int_cst (type, LOCATION_LINE (loc));
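/* Example use of the three builtins folded above (illustrative):

     printf ("%s:%d in %s\n", __builtin_FILE (), __builtin_LINE (),
             __builtin_FUNCTION ());

   Unlike the __FILE__ and __LINE__ macros, these are folded using the
   location of the call expression, which is what makes them usable as
   C++ default arguments that capture the caller's location.  */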
9204 /* Fold a call to built-in function FNDECL with 0 arguments.
9205 This function returns NULL_TREE if no simplification was possible. */
9207 static tree
9208 fold_builtin_0 (location_t loc, tree fndecl)
9210 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9211 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9212 switch (fcode)
9214 case BUILT_IN_FILE:
9215 return fold_builtin_FILE (loc);
9217 case BUILT_IN_FUNCTION:
9218 return fold_builtin_FUNCTION ();
9220 case BUILT_IN_LINE:
9221 return fold_builtin_LINE (loc, type);
9223 CASE_FLT_FN (BUILT_IN_INF):
9224 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9225 case BUILT_IN_INFD32:
9226 case BUILT_IN_INFD64:
9227 case BUILT_IN_INFD128:
9228 return fold_builtin_inf (loc, type, true);
9230 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9231 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9232 return fold_builtin_inf (loc, type, false);
9234 case BUILT_IN_CLASSIFY_TYPE:
9235 return fold_builtin_classify_type (NULL_TREE);
9237 default:
9238 break;
9240 return NULL_TREE;
9243 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9244 This function returns NULL_TREE if no simplification was possible. */
9246 static tree
9247 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9249 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9250 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9252 if (TREE_CODE (arg0) == ERROR_MARK)
9253 return NULL_TREE;
9255 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9256 return ret;
9258 switch (fcode)
9260 case BUILT_IN_CONSTANT_P:
9262 tree val = fold_builtin_constant_p (arg0);
9264 /* Gimplification will pull the CALL_EXPR for the builtin out of
9265 an if condition. When not optimizing, we'll not CSE it back.
9266 To avoid link error types of regressions, return false now. */
9267 if (!val && !optimize)
9268 val = integer_zero_node;
9270 return val;
9273 case BUILT_IN_CLASSIFY_TYPE:
9274 return fold_builtin_classify_type (arg0);
9276 case BUILT_IN_STRLEN:
9277 return fold_builtin_strlen (loc, type, arg0);
9279 CASE_FLT_FN (BUILT_IN_FABS):
9280 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9281 case BUILT_IN_FABSD32:
9282 case BUILT_IN_FABSD64:
9283 case BUILT_IN_FABSD128:
9284 return fold_builtin_fabs (loc, arg0, type);
9286 case BUILT_IN_ABS:
9287 case BUILT_IN_LABS:
9288 case BUILT_IN_LLABS:
9289 case BUILT_IN_IMAXABS:
9290 return fold_builtin_abs (loc, arg0, type);
9292 CASE_FLT_FN (BUILT_IN_CONJ):
9293 if (validate_arg (arg0, COMPLEX_TYPE)
9294 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9295 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9296 break;
9298 CASE_FLT_FN (BUILT_IN_CREAL):
9299 if (validate_arg (arg0, COMPLEX_TYPE)
9300 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9301 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9302 break;
9304 CASE_FLT_FN (BUILT_IN_CIMAG):
9305 if (validate_arg (arg0, COMPLEX_TYPE)
9306 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9307 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9308 break;
9310 CASE_FLT_FN (BUILT_IN_CARG):
9311 return fold_builtin_carg (loc, arg0, type);
9313 case BUILT_IN_ISASCII:
9314 return fold_builtin_isascii (loc, arg0);
9316 case BUILT_IN_TOASCII:
9317 return fold_builtin_toascii (loc, arg0);
9319 case BUILT_IN_ISDIGIT:
9320 return fold_builtin_isdigit (loc, arg0);
9322 CASE_FLT_FN (BUILT_IN_FINITE):
9323 case BUILT_IN_FINITED32:
9324 case BUILT_IN_FINITED64:
9325 case BUILT_IN_FINITED128:
9326 case BUILT_IN_ISFINITE:
9328 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9329 if (ret)
9330 return ret;
9331 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9334 CASE_FLT_FN (BUILT_IN_ISINF):
9335 case BUILT_IN_ISINFD32:
9336 case BUILT_IN_ISINFD64:
9337 case BUILT_IN_ISINFD128:
9339 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9340 if (ret)
9341 return ret;
9342 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9345 case BUILT_IN_ISNORMAL:
9346 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9348 case BUILT_IN_ISINF_SIGN:
9349 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9351 CASE_FLT_FN (BUILT_IN_ISNAN):
9352 case BUILT_IN_ISNAND32:
9353 case BUILT_IN_ISNAND64:
9354 case BUILT_IN_ISNAND128:
9355 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9357 case BUILT_IN_FREE:
9358 if (integer_zerop (arg0))
9359 return build_empty_stmt (loc);
9360 break;
9362 default:
9363 break;
9366 return NULL_TREE;
9370 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9371 This function returns NULL_TREE if no simplification was possible. */
9373 static tree
9374 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9376 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9377 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9379 if (TREE_CODE (arg0) == ERROR_MARK
9380 || TREE_CODE (arg1) == ERROR_MARK)
9381 return NULL_TREE;
9383 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9384 return ret;
9386 switch (fcode)
9388 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9389 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9390 if (validate_arg (arg0, REAL_TYPE)
9391 && validate_arg (arg1, POINTER_TYPE))
9392 return do_mpfr_lgamma_r (arg0, arg1, type);
9393 break;
9395 CASE_FLT_FN (BUILT_IN_FREXP):
9396 return fold_builtin_frexp (loc, arg0, arg1, type);
9398 CASE_FLT_FN (BUILT_IN_MODF):
9399 return fold_builtin_modf (loc, arg0, arg1, type);
9401 case BUILT_IN_STRSPN:
9402 return fold_builtin_strspn (loc, arg0, arg1);
9404 case BUILT_IN_STRCSPN:
9405 return fold_builtin_strcspn (loc, arg0, arg1);
9407 case BUILT_IN_STRPBRK:
9408 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9410 case BUILT_IN_EXPECT:
9411 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9413 case BUILT_IN_ISGREATER:
9414 return fold_builtin_unordered_cmp (loc, fndecl,
9415 arg0, arg1, UNLE_EXPR, LE_EXPR);
9416 case BUILT_IN_ISGREATEREQUAL:
9417 return fold_builtin_unordered_cmp (loc, fndecl,
9418 arg0, arg1, UNLT_EXPR, LT_EXPR);
9419 case BUILT_IN_ISLESS:
9420 return fold_builtin_unordered_cmp (loc, fndecl,
9421 arg0, arg1, UNGE_EXPR, GE_EXPR);
9422 case BUILT_IN_ISLESSEQUAL:
9423 return fold_builtin_unordered_cmp (loc, fndecl,
9424 arg0, arg1, UNGT_EXPR, GT_EXPR);
9425 case BUILT_IN_ISLESSGREATER:
9426 return fold_builtin_unordered_cmp (loc, fndecl,
9427 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9428 case BUILT_IN_ISUNORDERED:
9429 return fold_builtin_unordered_cmp (loc, fndecl,
9430 arg0, arg1, UNORDERED_EXPR,
9431 NOP_EXPR);
9433 /* We do the folding for va_start in the expander. */
9434 case BUILT_IN_VA_START:
9435 break;
9437 case BUILT_IN_OBJECT_SIZE:
9438 return fold_builtin_object_size (arg0, arg1);
9440 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9441 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9443 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9444 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9446 default:
9447 break;
9449 return NULL_TREE;
9452 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9453 and ARG2.
9454 This function returns NULL_TREE if no simplification was possible. */
9456 static tree
9457 fold_builtin_3 (location_t loc, tree fndecl,
9458 tree arg0, tree arg1, tree arg2)
9460 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9461 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9463 if (TREE_CODE (arg0) == ERROR_MARK
9464 || TREE_CODE (arg1) == ERROR_MARK
9465 || TREE_CODE (arg2) == ERROR_MARK)
9466 return NULL_TREE;
9468 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9469 arg0, arg1, arg2))
9470 return ret;
9472 switch (fcode)
9475 CASE_FLT_FN (BUILT_IN_SINCOS):
9476 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9478 CASE_FLT_FN (BUILT_IN_REMQUO):
9479 if (validate_arg (arg0, REAL_TYPE)
9480 && validate_arg (arg1, REAL_TYPE)
9481 && validate_arg (arg2, POINTER_TYPE))
9482 return do_mpfr_remquo (arg0, arg1, arg2);
9483 break;
9485 case BUILT_IN_MEMCMP:
9486 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9488 case BUILT_IN_EXPECT:
9489 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9491 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9492 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9494 case BUILT_IN_ADD_OVERFLOW:
9495 case BUILT_IN_SUB_OVERFLOW:
9496 case BUILT_IN_MUL_OVERFLOW:
9497 case BUILT_IN_ADD_OVERFLOW_P:
9498 case BUILT_IN_SUB_OVERFLOW_P:
9499 case BUILT_IN_MUL_OVERFLOW_P:
9500 case BUILT_IN_SADD_OVERFLOW:
9501 case BUILT_IN_SADDL_OVERFLOW:
9502 case BUILT_IN_SADDLL_OVERFLOW:
9503 case BUILT_IN_SSUB_OVERFLOW:
9504 case BUILT_IN_SSUBL_OVERFLOW:
9505 case BUILT_IN_SSUBLL_OVERFLOW:
9506 case BUILT_IN_SMUL_OVERFLOW:
9507 case BUILT_IN_SMULL_OVERFLOW:
9508 case BUILT_IN_SMULLL_OVERFLOW:
9509 case BUILT_IN_UADD_OVERFLOW:
9510 case BUILT_IN_UADDL_OVERFLOW:
9511 case BUILT_IN_UADDLL_OVERFLOW:
9512 case BUILT_IN_USUB_OVERFLOW:
9513 case BUILT_IN_USUBL_OVERFLOW:
9514 case BUILT_IN_USUBLL_OVERFLOW:
9515 case BUILT_IN_UMUL_OVERFLOW:
9516 case BUILT_IN_UMULL_OVERFLOW:
9517 case BUILT_IN_UMULLL_OVERFLOW:
9518 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9520 default:
9521 break;
9523 return NULL_TREE;
9526 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9527 arguments. IGNORE is true if the result of the
9528 function call is ignored. This function returns NULL_TREE if no
9529 simplification was possible. */
9531 tree
9532 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9534 tree ret = NULL_TREE;
9536 switch (nargs)
9538 case 0:
9539 ret = fold_builtin_0 (loc, fndecl);
9540 break;
9541 case 1:
9542 ret = fold_builtin_1 (loc, fndecl, args[0]);
9543 break;
9544 case 2:
9545 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9546 break;
9547 case 3:
9548 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9549 break;
9550 default:
9551 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9552 break;
9554 if (ret)
9556 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9557 SET_EXPR_LOCATION (ret, loc);
9558 return ret;
9560 return NULL_TREE;
9563 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9564 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9565 of arguments in ARGS to be omitted. OLDNARGS is the number of
9566 elements in ARGS. */
9568 static tree
9569 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9570 int skip, tree fndecl, int n, va_list newargs)
9572 int nargs = oldnargs - skip + n;
9573 tree *buffer;
9575 if (n > 0)
9577 int i, j;
9579 buffer = XALLOCAVEC (tree, nargs);
9580 for (i = 0; i < n; i++)
9581 buffer[i] = va_arg (newargs, tree);
9582 for (j = skip; j < oldnargs; j++, i++)
9583 buffer[i] = args[j];
9585 else
9586 buffer = args + skip;
9588 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9591 /* Return true if FNDECL shouldn't be folded right now.
9592 If a built-in function has an inline attribute always_inline
9593 wrapper, defer folding it after always_inline functions have
9594 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9595 might not be performed. */
9597 bool
9598 avoid_folding_inline_builtin (tree fndecl)
9600 return (DECL_DECLARED_INLINE_P (fndecl)
9601 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9602 && cfun
9603 && !cfun->always_inline_functions_inlined
9604 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
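/* Illustrative case this guards (a sketch of a fortified wrapper in
   the style of a C library header, not part of GCC itself):

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     void *
     memcpy (void *__dest, const void *__src, size_t __n)
     {
       return __builtin___memcpy_chk (__dest, __src, __n,
                                      __builtin_object_size (__dest, 0));
     }

   Folding the user's memcpy call before this wrapper is inlined would
   bypass the _FORTIFY_SOURCE check.  */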
9607 /* A wrapper function for builtin folding that prevents warnings for
9608 "statement without effect" and the like, caused by removing the
9609 call node earlier than the warning is generated. */
9611 tree
9612 fold_call_expr (location_t loc, tree exp, bool ignore)
9614 tree ret = NULL_TREE;
9615 tree fndecl = get_callee_fndecl (exp);
9616 if (fndecl
9617 && TREE_CODE (fndecl) == FUNCTION_DECL
9618 && DECL_BUILT_IN (fndecl)
9619 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9620 yet. Defer folding until we see all the arguments
9621 (after inlining). */
9622 && !CALL_EXPR_VA_ARG_PACK (exp))
9624 int nargs = call_expr_nargs (exp);
9626 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9627 instead the last argument is a __builtin_va_arg_pack () call. Defer folding
9628 even in that case, until arguments are finalized. */
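      /* Illustrative example (sketch):

           static inline __attribute__ ((always_inline)) int
           my_log (const char *fmt, ...)
           {
             return fprintf (stderr, fmt, __builtin_va_arg_pack ());
           }

         The fprintf call must not be folded until inlining replaces
         __builtin_va_arg_pack () with the caller's actual arguments.  */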
9629 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9631 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9632 if (fndecl2
9633 && TREE_CODE (fndecl2) == FUNCTION_DECL
9634 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9635 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9636 return NULL_TREE;
9639 if (avoid_folding_inline_builtin (fndecl))
9640 return NULL_TREE;
9642 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9643 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9644 CALL_EXPR_ARGP (exp), ignore);
9645 else
9647 tree *args = CALL_EXPR_ARGP (exp);
9648 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9649 if (ret)
9650 return ret;
9653 return NULL_TREE;
9656 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9657 N arguments are passed in the array ARGARRAY. Return a folded
9658 expression or NULL_TREE if no simplification was possible. */
9660 tree
9661 fold_builtin_call_array (location_t loc, tree,
9662 tree fn,
9663 int n,
9664 tree *argarray)
9666 if (TREE_CODE (fn) != ADDR_EXPR)
9667 return NULL_TREE;
9669 tree fndecl = TREE_OPERAND (fn, 0);
9670 if (TREE_CODE (fndecl) == FUNCTION_DECL
9671 && DECL_BUILT_IN (fndecl))
9673 /* If the last argument is __builtin_va_arg_pack (), arguments to this
9674 function are not finalized yet. Defer folding until they are. */
9675 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9677 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9678 if (fndecl2
9679 && TREE_CODE (fndecl2) == FUNCTION_DECL
9680 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9681 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9682 return NULL_TREE;
9684 if (avoid_folding_inline_builtin (fndecl))
9685 return NULL_TREE;
9686 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9687 return targetm.fold_builtin (fndecl, n, argarray, false);
9688 else
9689 return fold_builtin_n (loc, fndecl, argarray, n, false);
9692 return NULL_TREE;
9695 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9696 along with N new arguments specified as the "..." parameters. SKIP
9697 is the number of arguments in EXP to be omitted. This function is used
9698 to do varargs-to-varargs transformations. */
9700 static tree
9701 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9703 va_list ap;
9704 tree t;
9706 va_start (ap, n);
9707 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9708 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9709 va_end (ap);
9711 return t;
9714 /* Validate a single argument ARG against a tree code CODE representing
9715 a type. Return true when argument is valid. */
9717 static bool
9718 validate_arg (const_tree arg, enum tree_code code)
9720 if (!arg)
9721 return false;
9722 else if (code == POINTER_TYPE)
9723 return POINTER_TYPE_P (TREE_TYPE (arg));
9724 else if (code == INTEGER_TYPE)
9725 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9726 return code == TREE_CODE (TREE_TYPE (arg));
9729 /* This function validates the types of a function call argument list
9730 against a specified list of tree_codes. If the last specifier is a 0,
9731 that represents an ellipsis, otherwise the last specifier must be a
9732 VOID_TYPE.
9734 This is the GIMPLE version of validate_arglist. Eventually we want to
9735 completely convert builtins.c to work from GIMPLEs and the tree based
9736 validate_arglist will then be removed. */
9738 bool
9739 validate_gimple_arglist (const gcall *call, ...)
9741 enum tree_code code;
9742 bool res = false;
9743 va_list ap;
9744 const_tree arg;
9745 size_t i;
9747 va_start (ap, call);
9748 i = 0;
9750 do
9752 code = (enum tree_code) va_arg (ap, int);
9753 switch (code)
9755 case 0:
9756 /* This signifies an ellipsis; any further arguments are all ok. */
9757 res = true;
9758 goto end;
9759 case VOID_TYPE:
9760 /* This signifies an endlink, if no arguments remain, return
9761 true, otherwise return false. */
9762 res = (i == gimple_call_num_args (call));
9763 goto end;
9764 default:
9765 /* If no parameters remain or the parameter's code does not
9766 match the specified code, return false. Otherwise continue
9767 checking any remaining arguments. */
9768 arg = gimple_call_arg (call, i++);
9769 if (!validate_arg (arg, code))
9770 goto end;
9771 break;
9774 while (1);
9776 /* We need gotos here since we can only have one VA_CLOSE in a
9777 function. */
9778 end: ;
9779 va_end (ap);
9781 return res;
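/* Example use (illustrative): a sincos call would be checked with

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                              POINTER_TYPE, VOID_TYPE);

   while a printf-like builtin would end its specifier list with 0 to
   permit trailing variadic arguments.  */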
9784 /* Default target-specific builtin expander that does nothing. */
9786 rtx
9787 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9788 rtx target ATTRIBUTE_UNUSED,
9789 rtx subtarget ATTRIBUTE_UNUSED,
9790 machine_mode mode ATTRIBUTE_UNUSED,
9791 int ignore ATTRIBUTE_UNUSED)
9793 return NULL_RTX;
9796 /* Returns true if EXP represents data that would potentially reside
9797 in a readonly section. */
9799 bool
9800 readonly_data_expr (tree exp)
9802 STRIP_NOPS (exp);
9804 if (TREE_CODE (exp) != ADDR_EXPR)
9805 return false;
9807 exp = get_base_address (TREE_OPERAND (exp, 0));
9808 if (!exp)
9809 return false;
9811 /* Make sure we call decl_readonly_section only for trees it
9812 can handle (since it returns true for everything it doesn't
9813 understand). */
9814 if (TREE_CODE (exp) == STRING_CST
9815 || TREE_CODE (exp) == CONSTRUCTOR
9816 || (VAR_P (exp) && TREE_STATIC (exp)))
9817 return decl_readonly_section (exp, 0);
9818 else
9819 return false;
9822 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9823 to the call, and TYPE is its return type.
9825 Return NULL_TREE if no simplification was possible, otherwise return the
9826 simplified form of the call as a tree.
9828 The simplified form may be a constant or other expression which
9829 computes the same value, but in a more efficient manner (including
9830 calls to other builtin functions).
9832 The call may contain arguments which need to be evaluated, but
9833 which are not useful to determine the result of the call. In
9834 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9835 COMPOUND_EXPR will be an argument which must be evaluated.
9836 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9837 COMPOUND_EXPR in the chain will contain the tree for the simplified
9838 form of the builtin function call. */
9840 static tree
9841 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9843 if (!validate_arg (s1, POINTER_TYPE)
9844 || !validate_arg (s2, POINTER_TYPE))
9845 return NULL_TREE;
9846 else
9848 tree fn;
9849 const char *p1, *p2;
9851 p2 = c_getstr (s2);
9852 if (p2 == NULL)
9853 return NULL_TREE;
9855 p1 = c_getstr (s1);
9856 if (p1 != NULL)
9858 const char *r = strpbrk (p1, p2);
9859 tree tem;
9861 if (r == NULL)
9862 return build_int_cst (TREE_TYPE (s1), 0);
9864 /* Return an offset into the constant string argument. */
9865 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9866 return fold_convert_loc (loc, type, tem);
9869 if (p2[0] == '\0')
9870 /* strpbrk(x, "") == NULL.
9871 Evaluate and ignore s1 in case it had side-effects. */
9872 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9874 if (p2[1] != '\0')
9875 return NULL_TREE; /* Really call strpbrk. */
9877 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9878 if (!fn)
9879 return NULL_TREE;
9881 /* New argument list transforming strpbrk(s1, s2) to
9882 strchr(s1, s2[0]). */
9883 return build_call_expr_loc (loc, fn, 2, s1,
9884 build_int_cst (integer_type_node, p2[0]));
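/* Example folds performed above (illustrative):

     strpbrk (s, "")     =>  (evaluate s), (char *) 0
     strpbrk (s, "c")    =>  strchr (s, 'c')
     strpbrk ("ab", "b") =>  "ab" + 1  */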
9888 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9889 to the call.
9891 Return NULL_TREE if no simplification was possible, otherwise return the
9892 simplified form of the call as a tree.
9894 The simplified form may be a constant or other expression which
9895 computes the same value, but in a more efficient manner (including
9896 calls to other builtin functions).
9898 The call may contain arguments which need to be evaluated, but
9899 which are not useful to determine the result of the call. In
9900 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9901 COMPOUND_EXPR will be an argument which must be evaluated.
9902 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9903 COMPOUND_EXPR in the chain will contain the tree for the simplified
9904 form of the builtin function call. */
9906 static tree
9907 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9909 if (!validate_arg (s1, POINTER_TYPE)
9910 || !validate_arg (s2, POINTER_TYPE))
9911 return NULL_TREE;
9912 else
9914 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9916 /* If either argument is "", return NULL_TREE. */
9917 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9918 /* Evaluate and ignore both arguments in case either one has
9919 side-effects. */
9920 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9921 s1, s2);
9922 return NULL_TREE;
9926 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9927 to the call.
9929 Return NULL_TREE if no simplification was possible, otherwise return the
9930 simplified form of the call as a tree.
9932 The simplified form may be a constant or other expression which
9933 computes the same value, but in a more efficient manner (including
9934 calls to other builtin functions).
9936 The call may contain arguments which need to be evaluated, but
9937 which are not useful to determine the result of the call. In
9938 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9939 COMPOUND_EXPR will be an argument which must be evaluated.
9940 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9941 COMPOUND_EXPR in the chain will contain the tree for the simplified
9942 form of the builtin function call. */
9944 static tree
9945 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9947 if (!validate_arg (s1, POINTER_TYPE)
9948 || !validate_arg (s2, POINTER_TYPE))
9949 return NULL_TREE;
9950 else
9952 /* If the first argument is "", return NULL_TREE. */
9953 const char *p1 = c_getstr (s1);
9954 if (p1 && *p1 == '\0')
9956 /* Evaluate and ignore argument s2 in case it has
9957 side-effects. */
9958 return omit_one_operand_loc (loc, size_type_node,
9959 size_zero_node, s2);
9962 /* If the second argument is "", return __builtin_strlen(s1). */
9963 const char *p2 = c_getstr (s2);
9964 if (p2 && *p2 == '\0')
9966 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9968 /* If the replacement _DECL isn't initialized, don't do the
9969 transformation. */
9970 if (!fn)
9971 return NULL_TREE;
9973 return build_call_expr_loc (loc, fn, 1, s1);
9975 return NULL_TREE;
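/* Example folds performed above (illustrative):

     strcspn ("", s)  =>  (evaluate s), (size_t) 0
     strcspn (s, "")  =>  strlen (s)  */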
9979 /* Fold the next_arg or va_start call EXP. Return true if an error was
9980 produced, false otherwise. This is done so that we don't output the
9981 error or warning more than once. */
9983 bool
9984 fold_builtin_next_arg (tree exp, bool va_start_p)
9986 tree fntype = TREE_TYPE (current_function_decl);
9987 int nargs = call_expr_nargs (exp);
9988 tree arg;
9989 /* There is a good chance the current input_location points inside the
9990 definition of the va_start macro (perhaps on the token for
9991 builtin) in a system header, so warnings will not be emitted.
9992 Use the location in real source code. */
9993 source_location current_location =
9994 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9995 NULL);
9997 if (!stdarg_p (fntype))
9999 error ("%<va_start%> used in function with fixed args");
10000 return true;
10003 if (va_start_p)
10005 if (nargs != 2)
10007 error ("wrong number of arguments to function %<va_start%>");
10008 return true;
10010 arg = CALL_EXPR_ARG (exp, 1);
10012 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10013 when we checked the arguments and if needed issued a warning. */
10014 else
10016 if (nargs == 0)
10018 /* Evidently an out of date version of <stdarg.h>; can't validate
10019 va_start's second argument, but can still work as intended. */
10020 warning_at (current_location,
10021 OPT_Wvarargs,
10022 "%<__builtin_next_arg%> called without an argument");
10023 return true;
10025 else if (nargs > 1)
10027 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10028 return true;
10030 arg = CALL_EXPR_ARG (exp, 0);
10033 if (TREE_CODE (arg) == SSA_NAME)
10034 arg = SSA_NAME_VAR (arg);
10036 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10037 or __builtin_next_arg (0) the first time we see it, after checking
10038 the arguments and if needed issuing a warning. */
10039 if (!integer_zerop (arg))
10041 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10043 /* Strip off all nops for the sake of the comparison. This
10044 is not quite the same as STRIP_NOPS. It does more.
10045 We must also strip off INDIRECT_EXPR for C++ reference
10046 parameters. */
10047 while (CONVERT_EXPR_P (arg)
10048 || TREE_CODE (arg) == INDIRECT_REF)
10049 arg = TREE_OPERAND (arg, 0);
10050 if (arg != last_parm)
10052 /* FIXME: Sometimes with the tree optimizers we can get
10053 something other than the last argument even though the user
10054 used the last argument. We just warn and set the arg to be
10055 the last argument so that we will not generate wrong code
10056 because of it. */
10057 warning_at (current_location,
10058 OPT_Wvarargs,
10059 "second parameter of %<va_start%> not last named argument");
10062 /* Undefined by C99 7.15.1.4p4 (va_start):
10063 "If the parameter parmN is declared with the register storage
10064 class, with a function or array type, or with a type that is
10065 not compatible with the type that results after application of
10066 the default argument promotions, the behavior is undefined."
10067 */
10068 else if (DECL_REGISTER (arg))
10070 warning_at (current_location,
10071 OPT_Wvarargs,
10072 "undefined behavior when second parameter of "
10073 "%<va_start%> is declared with %<register%> storage");
10076 /* We want to verify the second parameter just once before the tree
10077 optimizers are run and then avoid keeping it in the tree,
10078 as otherwise we could warn even for correct code like:
10079 void foo (int i, ...)
10080 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10081 if (va_start_p)
10082 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10083 else
10084 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10086 return false;
10090 /* Expand a call EXP to __builtin_object_size. */
10092 static rtx
10093 expand_builtin_object_size (tree exp)
10095 tree ost;
10096 int object_size_type;
10097 tree fndecl = get_callee_fndecl (exp);
10099 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10101 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10102 exp, fndecl);
10103 expand_builtin_trap ();
10104 return const0_rtx;
10107 ost = CALL_EXPR_ARG (exp, 1);
10108 STRIP_NOPS (ost);
10110 if (TREE_CODE (ost) != INTEGER_CST
10111 || tree_int_cst_sgn (ost) < 0
10112 || compare_tree_int (ost, 3) > 0)
10114 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10115 exp, fndecl);
10116 expand_builtin_trap ();
10117 return const0_rtx;
10120 object_size_type = tree_to_shwi (ost);
10122 return object_size_type < 2 ? constm1_rtx : const0_rtx;
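/* In other words (illustrative), when the size was not folded earlier
   the builtin degrades to its documented "unknown" values:

     __builtin_object_size (p, 0)  =>  (size_t) -1
     __builtin_object_size (p, 2)  =>  (size_t) 0

   maximum-style queries fail towards unbounded, minimum-style ones
   towards zero.  */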
10125 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10126 FCODE is the BUILT_IN_* to use.
10127 Return NULL_RTX if we failed; the caller should emit a normal call,
10128 otherwise try to get the result in TARGET, if convenient (and in
10129 mode MODE if that's convenient). */
10131 static rtx
10132 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10133 enum built_in_function fcode)
10135 if (!validate_arglist (exp,
10136 POINTER_TYPE,
10137 fcode == BUILT_IN_MEMSET_CHK
10138 ? INTEGER_TYPE : POINTER_TYPE,
10139 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10140 return NULL_RTX;
10142 tree dest = CALL_EXPR_ARG (exp, 0);
10143 tree src = CALL_EXPR_ARG (exp, 1);
10144 tree len = CALL_EXPR_ARG (exp, 2);
10145 tree size = CALL_EXPR_ARG (exp, 3);
10147 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10148 /*str=*/NULL_TREE, size);
10150 if (!tree_fits_uhwi_p (size))
10151 return NULL_RTX;
10153 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10155 /* Avoid transforming the checking call to an ordinary one when
10156 an overflow has been detected or when the call couldn't be
10157 validated because the size is not constant. */
10158 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10159 return NULL_RTX;
10161 tree fn = NULL_TREE;
10162 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10163 mem{cpy,pcpy,move,set} is available. */
10164 switch (fcode)
10166 case BUILT_IN_MEMCPY_CHK:
10167 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10168 break;
10169 case BUILT_IN_MEMPCPY_CHK:
10170 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10171 break;
10172 case BUILT_IN_MEMMOVE_CHK:
10173 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10174 break;
10175 case BUILT_IN_MEMSET_CHK:
10176 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10177 break;
10178 default:
10179 break;
10182 if (! fn)
10183 return NULL_RTX;
10185 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10186 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10187 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10188 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10190 else if (fcode == BUILT_IN_MEMSET_CHK)
10191 return NULL_RTX;
10192 else
10194 unsigned int dest_align = get_pointer_alignment (dest);
10196 /* If DEST is not a pointer type, call the normal function. */
10197 if (dest_align == 0)
10198 return NULL_RTX;
10200 /* If SRC and DEST are the same (and not volatile), do nothing. */
10201 if (operand_equal_p (src, dest, 0))
10203 tree expr;
10205 if (fcode != BUILT_IN_MEMPCPY_CHK)
10207 /* Evaluate and ignore LEN in case it has side-effects. */
10208 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10209 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10212 expr = fold_build_pointer_plus (dest, len);
10213 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10216 /* __memmove_chk special case. */
10217 if (fcode == BUILT_IN_MEMMOVE_CHK)
10219 unsigned int src_align = get_pointer_alignment (src);
10221 if (src_align == 0)
10222 return NULL_RTX;
10224 /* If src is categorized for a readonly section we can use
10225 normal __memcpy_chk. */
10226 if (readonly_data_expr (src))
10228 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10229 if (!fn)
10230 return NULL_RTX;
10231 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10232 dest, src, len, size);
10233 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10234 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10235 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10238 return NULL_RTX;
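/* Illustrative expansion performed above (sketch): with a constant
   length that passes the check,

     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0))

   is emitted as a plain memcpy (d, s, 16); when an overflow is
   detected, the checking call is kept so the runtime check still
   fires.  */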
10242 /* Emit warning if a buffer overflow is detected at compile time. */
10244 static void
10245 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10247 /* The source string. */
10248 tree srcstr = NULL_TREE;
10249 /* The size of the destination object. */
10250 tree objsize = NULL_TREE;
10251 /* The string that is being concatenated with (as in __strcat_chk)
10252 or null if it isn't. */
10253 tree catstr = NULL_TREE;
10254 /* The maximum length of the source sequence in a bounded operation
10255 (such as __strncat_chk) or null if the operation isn't bounded
10256 (such as __strcat_chk). */
10257 tree maxread = NULL_TREE;
10258 /* The exact size of the access (such as in __strncpy_chk). */
10259 tree size = NULL_TREE;
10261 switch (fcode)
10263 case BUILT_IN_STRCPY_CHK:
10264 case BUILT_IN_STPCPY_CHK:
10265 srcstr = CALL_EXPR_ARG (exp, 1);
10266 objsize = CALL_EXPR_ARG (exp, 2);
10267 break;
10269 case BUILT_IN_STRCAT_CHK:
10270 /* For __strcat_chk the warning will be emitted only if overflowing
10271 by at least strlen (dest) + 1 bytes. */
10272 catstr = CALL_EXPR_ARG (exp, 0);
10273 srcstr = CALL_EXPR_ARG (exp, 1);
10274 objsize = CALL_EXPR_ARG (exp, 2);
10275 break;
10277 case BUILT_IN_STRNCAT_CHK:
10278 catstr = CALL_EXPR_ARG (exp, 0);
10279 srcstr = CALL_EXPR_ARG (exp, 1);
10280 maxread = CALL_EXPR_ARG (exp, 2);
10281 objsize = CALL_EXPR_ARG (exp, 3);
10282 break;
10284 case BUILT_IN_STRNCPY_CHK:
10285 case BUILT_IN_STPNCPY_CHK:
10286 srcstr = CALL_EXPR_ARG (exp, 1);
10287 size = CALL_EXPR_ARG (exp, 2);
10288 objsize = CALL_EXPR_ARG (exp, 3);
10289 break;
10291 case BUILT_IN_SNPRINTF_CHK:
10292 case BUILT_IN_VSNPRINTF_CHK:
10293 maxread = CALL_EXPR_ARG (exp, 1);
10294 objsize = CALL_EXPR_ARG (exp, 3);
10295 break;
10296 default:
10297 gcc_unreachable ();
10300 if (catstr && maxread)
10302 /* Check __strncat_chk. There is no way to determine the length
10303 of the string to which the source string is being appended so
10304 just warn when the length of the source string is not known. */
10305 check_strncat_sizes (exp, objsize);
10306 return;
10309 /* The destination argument is the first one for all built-ins above. */
10310 tree dst = CALL_EXPR_ARG (exp, 0);
10312 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10315 /* Emit warning if a buffer overflow is detected at compile time
10316 in __sprintf_chk/__vsprintf_chk calls. */
10318 static void
10319 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10321 tree size, len, fmt;
10322 const char *fmt_str;
10323 int nargs = call_expr_nargs (exp);
10325 /* Verify the required arguments in the original call. */
10327 if (nargs < 4)
10328 return;
10329 size = CALL_EXPR_ARG (exp, 2);
10330 fmt = CALL_EXPR_ARG (exp, 3);
10332 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10333 return;
10335 /* Check whether the format is a literal string constant. */
10336 fmt_str = c_getstr (fmt);
10337 if (fmt_str == NULL)
10338 return;
10340 if (!init_target_chars ())
10341 return;
10343 /* If the format doesn't contain % args or %%, we know its size. */
10344 if (strchr (fmt_str, target_percent) == 0)
10345 len = build_int_cstu (size_type_node, strlen (fmt_str));
10346 /* If the format is "%s" and the first ... argument is a string literal,
10347 we know it too. */
10348 else if (fcode == BUILT_IN_SPRINTF_CHK
10349 && strcmp (fmt_str, target_percent_s) == 0)
10351 tree arg;
10353 if (nargs < 5)
10354 return;
10355 arg = CALL_EXPR_ARG (exp, 4);
10356 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10357 return;
10359 len = c_strlen (arg, 1);
10360 if (!len || ! tree_fits_uhwi_p (len))
10361 return;
10363 else
10364 return;
10366 /* Add one for the terminating nul. */
10367 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10369 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10370 /*maxread=*/NULL_TREE, len, size);
10373 /* Emit warning if a free is called with address of a variable. */
10375 static void
10376 maybe_emit_free_warning (tree exp)
10378 tree arg = CALL_EXPR_ARG (exp, 0);
10380 STRIP_NOPS (arg);
10381 if (TREE_CODE (arg) != ADDR_EXPR)
10382 return;
10384 arg = get_base_address (TREE_OPERAND (arg, 0));
10385 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10386 return;
10388 if (SSA_VAR_P (arg))
10389 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10390 "%Kattempt to free a non-heap object %qD", exp, arg);
10391 else
10392 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10393 "%Kattempt to free a non-heap object", exp);
10396 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10397 if possible. */
10399 static tree
10400 fold_builtin_object_size (tree ptr, tree ost)
10402 unsigned HOST_WIDE_INT bytes;
10403 int object_size_type;
10405 if (!validate_arg (ptr, POINTER_TYPE)
10406 || !validate_arg (ost, INTEGER_TYPE))
10407 return NULL_TREE;
10409 STRIP_NOPS (ost);
10411 if (TREE_CODE (ost) != INTEGER_CST
10412 || tree_int_cst_sgn (ost) < 0
10413 || compare_tree_int (ost, 3) > 0)
10414 return NULL_TREE;
10416 object_size_type = tree_to_shwi (ost);
10418 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10419 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10420 and (size_t) 0 for types 2 and 3. */
10421 if (TREE_SIDE_EFFECTS (ptr))
10422 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10424 if (TREE_CODE (ptr) == ADDR_EXPR)
10426 compute_builtin_object_size (ptr, object_size_type, &bytes);
10427 if (wi::fits_to_tree_p (bytes, size_type_node))
10428 return build_int_cstu (size_type_node, bytes);
10430 else if (TREE_CODE (ptr) == SSA_NAME)
10432 /* If object size is not known yet, delay folding until
10433 later. Maybe subsequent passes will help determining
10434 it. */
10435 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10436 && wi::fits_to_tree_p (bytes, size_type_node))
10437 return build_int_cstu (size_type_node, bytes);
10440 return NULL_TREE;
10443 /* Builtins with folding operations that operate on "..." arguments
10444 need special handling; we need to store the arguments in a convenient
10445 data structure before attempting any folding. Fortunately there are
10446 only a few builtins that fall into this category. FNDECL is the
10447 function, EXP is the CALL_EXPR for the call. */
10449 static tree
10450 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10453 tree ret = NULL_TREE;
10455 switch (fcode)
10457 case BUILT_IN_FPCLASSIFY:
10458 ret = fold_builtin_fpclassify (loc, args, nargs);
10459 break;
10461 default:
10462 break;
10464 if (ret)
10466 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10467 SET_EXPR_LOCATION (ret, loc);
10468 TREE_NO_WARNING (ret) = 1;
10469 return ret;
10471 return NULL_TREE;
10474 /* Initialize format string characters in the target charset. */
10476 bool
10477 init_target_chars (void)
10479 static bool init;
10480 if (!init)
10482 target_newline = lang_hooks.to_target_charset ('\n');
10483 target_percent = lang_hooks.to_target_charset ('%');
10484 target_c = lang_hooks.to_target_charset ('c');
10485 target_s = lang_hooks.to_target_charset ('s');
10486 if (target_newline == 0 || target_percent == 0 || target_c == 0
10487 || target_s == 0)
10488 return false;
10490 target_percent_c[0] = target_percent;
10491 target_percent_c[1] = target_c;
10492 target_percent_c[2] = '\0';
10494 target_percent_s[0] = target_percent;
10495 target_percent_s[1] = target_s;
10496 target_percent_s[2] = '\0';
10498 target_percent_s_newline[0] = target_percent;
10499 target_percent_s_newline[1] = target_s;
10500 target_percent_s_newline[2] = target_newline;
10501 target_percent_s_newline[3] = '\0';
10503 init = true;
10505 return true;
10508 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10509 and no overflow/underflow occurred. INEXACT is true if M was not
10510 exactly calculated. TYPE is the tree type for the result. This
10511 function assumes that you cleared the MPFR flags and then
10512 calculated M to see if anything subsequently set a flag prior to
10513 entering this function. Return NULL_TREE if any checks fail. */
10515 static tree
10516 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10518 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10519 overflow/underflow occurred. If -frounding-math, proceed iff the
10520 result of calling FUNC was exact. */
10521 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10522 && (!flag_rounding_math || !inexact))
10524 REAL_VALUE_TYPE rr;
10526 real_from_mpfr (&rr, m, type, GMP_RNDN);
10527 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10528 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10529 but the mpfr_t is not, then we underflowed in the
10530 conversion. */
10531 if (real_isfinite (&rr)
10532 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10534 REAL_VALUE_TYPE rmode;
10536 real_convert (&rmode, TYPE_MODE (type), &rr);
10537 /* Proceed iff the specified mode can hold the value. */
10538 if (real_identical (&rmode, &rr))
10539 return build_real (type, rmode);
10542 return NULL_TREE;
10545 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10546 number and no overflow/underflow occurred. INEXACT is true if M
10547 was not exactly calculated. TYPE is the tree type for the result.
10548 This function assumes that you cleared the MPFR flags and then
10549 calculated M to see if anything subsequently set a flag prior to
10550 entering this function. Return NULL_TREE if any checks fail, if
10551 FORCE_CONVERT is true, then bypass the checks. */
10553 static tree
10554 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10556 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10557 overflow/underflow occurred. If -frounding-math, proceed iff the
10558 result of calling FUNC was exact. */
10559 if (force_convert
10560 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10561 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10562 && (!flag_rounding_math || !inexact)))
10564 REAL_VALUE_TYPE re, im;
10566 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10567 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10568 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10569 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10570 but the mpfr_t is not, then we underflowed in the
10571 conversion. */
10572 if (force_convert
10573 || (real_isfinite (&re) && real_isfinite (&im)
10574 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10575 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10577 REAL_VALUE_TYPE re_mode, im_mode;
10579 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10580 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10581 /* Proceed iff the specified mode can hold the value. */
10582 if (force_convert
10583 || (real_identical (&re_mode, &re)
10584 && real_identical (&im_mode, &im)))
10585 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10586 build_real (TREE_TYPE (type), im_mode));
10589 return NULL_TREE;
10592 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10593 the pointer *(ARG_QUO) and return the result. The type is taken
10594 from the type of ARG0 and is used for setting the precision of the
10595 calculation and results. */
10597 static tree
10598 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10600 tree const type = TREE_TYPE (arg0);
10601 tree result = NULL_TREE;
10603 STRIP_NOPS (arg0);
10604 STRIP_NOPS (arg1);
10606 /* To proceed, MPFR must exactly represent the target floating point
10607 format, which only happens when the target base equals two. */
10608 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10609 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10610 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10612 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10613 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10615 if (real_isfinite (ra0) && real_isfinite (ra1))
10617 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10618 const int prec = fmt->p;
10619 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10620 tree result_rem;
10621 long integer_quo;
10622 mpfr_t m0, m1;
10624 mpfr_inits2 (prec, m0, m1, NULL);
10625 mpfr_from_real (m0, ra0, GMP_RNDN);
10626 mpfr_from_real (m1, ra1, GMP_RNDN);
10627 mpfr_clear_flags ();
10628 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10629 /* Remquo is independent of the rounding mode, so pass
10630 inexact=0 to do_mpfr_ckconv(). */
10631 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10632 mpfr_clears (m0, m1, NULL);
10633 if (result_rem)
10635 /* MPFR calculates quo in the host's long so it may
10636 return more bits in quo than the target int can hold
10637 if sizeof(host long) > sizeof(target int). This can
10638 happen even for native compilers in LP64 mode. In
10639 these cases, modulo the quo value with the largest
10640 number that the target int can hold while leaving one
10641 bit for the sign. */
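	     /* For example (illustrative): with a 64-bit host long and
		an INT_TYPE_SIZE of 32, this computes
		integer_quo %= (long) (1UL << 31), keeping only the low
		31 magnitude bits plus the sign.  */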
10642 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10643 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10645 /* Dereference the quo pointer argument. */
10646 arg_quo = build_fold_indirect_ref (arg_quo);
10647 /* Proceed iff a valid pointer type was passed in. */
10648 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10650 /* Set the value. */
10651 tree result_quo
10652 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10653 build_int_cst (TREE_TYPE (arg_quo),
10654 integer_quo));
10655 TREE_SIDE_EFFECTS (result_quo) = 1;
10656 /* Combine the quo assignment with the rem. */
10657 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10658 result_quo, result_rem));
10663 return result;

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
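
/* As a worked example (the input value is illustrative):
   __builtin_lgamma_r (-0.5, &sg) has gamma(-0.5) == -2*sqrt(pi), so
   the code above folds the lgamma value to log(2*sqrt(pi)) ~= 1.2655
   and stores -1 in *sg because gamma(-0.5) is negative.  */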

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
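
/* A typical FUNC here is mpc_pow, passed in when folding cpow of
   constant operands: the two complex constants are loaded into m0 and
   m1 at target precision, FUNC combines them, and do_mpc_ckconv turns
   the mpc result back into a COMPLEX_CST when it is exact or
   DO_NONFINITE permits a nonfinite fold.  */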

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
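
/* E.g. a gimple call statement for __builtin_sqrt (4.0) folds here to
   the REAL_CST 2.0, and the statement's location is copied onto the
   replacement so later diagnostics still point at the original call.  */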

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
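
/* This is how a user declaration carrying an asm label, such as

     extern int ffs (int) __asm__ ("my_ffs");

   is honored for the builtin as well ("my_ffs" is an illustrative
   name): the explicit builtin decl is renamed, and for ffs on targets
   where int is narrower than a word the ffs optab libfunc is
   redirected too, so libcall expansions pick up the user name.  */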

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
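
/* For instance, __builtin_constant_p collapses to the constant 0 or 1
   and __builtin_expect to the value of its first argument, so such
   calls cost essentially nothing once expanded.  */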

/* Return true if DECL is a builtin that is not expensive, i.e. one that
   is most probably expanded inline into reasonably simple code.  This
   is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
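
/* On many targets most of the cases above expand to a single insn
   (e.g. __builtin_bswap32 to a byte-swap instruction, __builtin_clz
   to a count-leading-zeros instruction), which is why cost heuristics
   such as the inliner's may treat these calls as nearly free.  */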

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   If so, store the cast char constant in *P.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
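
/* E.g. for T equal to the character constant 'A' this stores 'A' in
   *P; it refuses to proceed when the target's char width differs from
   the host's (say, a 16-bit-char target, mentioned here only as an
   illustration), since a host char could not represent every target
   char value.  */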

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
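
/* On an LP64 target this comes out to PTRDIFF_MAX, i.e. 2^63 - 1; the
   object-size and string builtin checks in this file use it as the
   largest size a valid object can have.  */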