PR tree-optimization/87059
gcc/builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
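
/* Editorial note (illustrative example, not part of the original source):
   any identifier carrying one of the three reserved prefixes checked above
   is treated as a builtin name, whatever the optimization level:

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */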
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
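
/* Editorial note (worked example, not part of the original source): the
   align/bitpos contract above in concrete numbers.  Suppose
   get_object_alignment_1 reports align == 64 (bits) and bitpos == 16:
   the address is known to satisfy ptr % 64 == 16, i.e. an 8-byte-aligned
   base plus a 2-byte offset.  Since bitpos != 0, the largest provable
   power-of-two alignment is least_bit_hwi (16) == 16 bits, so
   get_object_alignment returns 16 (a 2-byte boundary).  */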
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an
	     approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
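
/* Editorial note (worked example, not part of the original source): the
   POINTER_PLUS_EXPR case above, assuming an 8-byte-aligned object BUF.
   For the pointer &buf[3], get_pointer_alignment_1 yields align == 64
   and bitpos == 24 (3 bytes); the nonzero bitpos reduces the result of
   get_pointer_alignment to least_bit_hwi (24) == 8 bits, i.e. only byte
   alignment can be guaranteed for the sum.  */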
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
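
/* Editorial note (illustrative values, not part of the original source):
   for the narrow string "foo\0bar" (seven elements of size 1),
   string_length ("foo\0bar", 1, 7) returns 3, stopping at the embedded
   NUL; with maxelts == 2 it returns 2, having run off the end without
   seeing a terminator.  For eltsize 2 or 4, the memcmp path compares
   whole elements against a zero element of the same width.  */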
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
tree
c_strlen (tree src, int only_value, unsigned eltsize)
{
  gcc_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  src = string_constant (src, &byteoff, &memsize);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* For empty strings the result should be zero.  */
      if (maxelts == 0)
	return ssize_int (0);

      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.  */
      if (len < strelts || len > maxelts)
	return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  return ssize_int (len);
}
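
/* Editorial note (illustrative values, not part of the original source):
   the cases above in terms of a concrete constant.  Given the initializer
   "foo\0bar" (strelts == 7 after discarding the trailing NUL):
     - a known offset of 0 yields ssize_int (3);
     - a known offset of 4 also yields ssize_int (3), for "bar";
     - an unknown offset returns NULL_TREE, because the embedded NUL makes
       the length depend on where the search starts.
   For "foo" stored in a char[16], offsets between 4 and 15 fall in the
   zero-filled tail and yield ssize_int (0).  */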
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
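
/* Editorial note (byte-ordering sketch, not part of the original source):
   for a 32-bit SImode read of "abcd", on a little-endian target the byte
   'a' lands in the least significant position, giving 0x64636261; on a
   big-endian target the same call gives 0x61626364.  Once a NUL has been
   seen, CH stays zero, so positions past the end of the string read as
   zeros rather than stray memory.  */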
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
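
/* Editorial note (illustrative example, not part of the original source):
   the C-level view of the expansion above, using the standard GCC builtins:

     void *ra = __builtin_return_address (0);  // return address of this frame
     void *fp = __builtin_frame_address (1);   // frame address of the caller

   A nonzero count walks the dynamic chain via DYNAMIC_CHAIN_ADDRESS and
   forces use of the hard frame pointer, as the code above explains.  */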
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
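
/* Editorial note (layout summary, not part of the original source): the
   jmp_buf layout established above, in words of GET_MODE_SIZE (Pmode):

     word 0    frame pointer (targetm.builtin_setjmp_frame_value)
     word 1    address of the receiver label
     word 2+   stack save area (sa_mode, machine-dependent)

   expand_builtin_longjmp below reads the buffer back with the same
   layout.  */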
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
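
/* Editorial note (usage sketch, not part of the original source): a
   minimal source-level pairing of the two expanders, mirroring their
   internal-EH use.  The second argument to __builtin_longjmp must be the
   literal 1 (asserted above), and the jump must come from a different
   function than the setjmp, per the REG_NON_LOCAL_GOTO note just emitted:

     static void *buf[5];

     void thrower (void) { __builtin_longjmp (buf, 1); }

     int catcher (void)
     {
       if (__builtin_setjmp (buf) == 0)
	 return work ();   // work () may call thrower ()
       return -1;          // reached via the longjmp
     }

   work and thrower are illustrative placeholders.  */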
/* Return true if more call-expression arguments remain in the
   iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
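
/* Editorial note (illustrative example, not part of the original source):
   how the expanders below use this checker.  A call such as

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments (the trailing VOID_TYPE is the
   endlink), while a trailing literal 0 in the specifier list would accept
   any further arguments as an ellipsis.  Pointer arguments covered by
   attribute nonnull additionally reject a literal null.  */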
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>;"
	       " using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
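
/* Editorial note (illustrative example, not part of the original source):
   source-level calls matching the argument checks above (ADDR is any
   pointer expression):

     __builtin_prefetch (addr);        // rw = 0 (read), locality = 3
     __builtin_prefetch (addr, 1);     // prefetch for write
     __builtin_prefetch (addr, 0, 1);  // read, low temporal locality

   The rw and locality arguments must be compile-time constants; invalid
   values are diagnosed and replaced with zero, as the code above shows.  */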
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0,
					  0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
    }
  return size;
}
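
/* Editorial note (layout sketch, not part of the original source), under
   the illustrative assumption of a 64-bit target with two 8-byte integer
   argument registers and one 16-byte vector argument register:

     bytes  0- 7   incoming arg pointer (Pmode)
     bytes  8-15   struct value address, if not passed invisibly
     bytes 16-31   two integer argument registers (8-byte aligned)
     bytes 32-47   vector argument register (padded to 16-byte alignment)

   Each register slot is rounded up to its mode's alignment before being
   appended, exactly as the loop above computes SIZE.  */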
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the
     call to the first insn of this function.  */
  rtx temp;

  start_sequence ();
  temp = expand_builtin_apply_args_1 ();
  rtx_insn *seq = get_insns ();
  end_sequence ();

  apply_args_value = temp;

  /* Put the insns after the NOTE that starts the function.
     If this is inside a start_sequence, make the outer-level insn
     chain current, so the code is placed at the start of the
     function.  If internal_arg_pointer is a non-virtual pseudo,
     it needs to be placed after the function that initializes
     that pseudo.  */
  push_topmost_sequence ();
  if (REG_P (crtl->args.internal_arg_pointer)
      && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
    emit_insn_before (seq, parm_birth_insn);
  else
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
  pop_topmost_sequence ();
  return temp;
}
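
/* Editorial note (usage sketch, not part of the original source): the
   forwarding idiom these builtins exist for, as documented in the GCC
   manual; FUNC and SIZE are placeholders for the callee and the byte size
   of the argument block:

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply (func, args, size);
     __builtin_return (result);

   __builtin_apply_args snapshots the incoming registers (the code above),
   __builtin_apply replays them for an untyped call to FUNC (the function
   below), and __builtin_return performs the untyped return handled later
   in this file.  */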
1606 /* Perform an untyped call and save the state required to perform an
1607 untyped return of whatever value was returned by the given function. */
1609 static rtx
1610 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1612 int size, align, regno;
1613 fixed_size_mode mode;
1614 rtx incoming_args, result, reg, dest, src;
1615 rtx_call_insn *call_insn;
1616 rtx old_stack_level = 0;
1617 rtx call_fusage = 0;
1618 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1620 arguments = convert_memory_address (Pmode, arguments);
1622 /* Create a block where the return registers can be saved. */
1623 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1625 /* Fetch the arg pointer from the ARGUMENTS block. */
1626 incoming_args = gen_reg_rtx (Pmode);
1627 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1628 if (!STACK_GROWS_DOWNWARD)
1629 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1630 incoming_args, 0, OPTAB_LIB_WIDEN);
1632 /* Push a new argument block and copy the arguments. Do not allow
1633 the (potential) memcpy call below to interfere with our stack
1634 manipulations. */
1635 do_pending_stack_adjust ();
1636 NO_DEFER_POP;
1638 /* Save the stack with nonlocal if available. */
1639 if (targetm.have_save_stack_nonlocal ())
1640 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1641 else
1642 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1644 /* Allocate a block of memory onto the stack and copy the memory
1645 arguments to the outgoing arguments address. We can pass TRUE
1646 as the 4th argument because we just saved the stack pointer
1647 and will restore it right after the call. */
1648 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1650 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1651 may have already set current_function_calls_alloca to true.
1652 current_function_calls_alloca won't be set if argsize is zero,
1653 so we have to guarantee need_drap is true here. */
1654 if (SUPPORTS_STACK_ALIGNMENT)
1655 crtl->need_drap = true;
1657 dest = virtual_outgoing_args_rtx;
1658 if (!STACK_GROWS_DOWNWARD)
1660 if (CONST_INT_P (argsize))
1661 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1662 else
1663 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1665 dest = gen_rtx_MEM (BLKmode, dest);
1666 set_mem_align (dest, PARM_BOUNDARY);
1667 src = gen_rtx_MEM (BLKmode, incoming_args);
1668 set_mem_align (src, PARM_BOUNDARY);
1669 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1671 /* Refer to the argument block. */
1672 apply_args_size ();
1673 arguments = gen_rtx_MEM (BLKmode, arguments);
1674 set_mem_align (arguments, PARM_BOUNDARY);
1676 /* Walk past the arg-pointer and structure value address. */
1677 size = GET_MODE_SIZE (Pmode);
1678 if (struct_value)
1679 size += GET_MODE_SIZE (Pmode);
1681 /* Restore each of the registers previously saved. Make USE insns
1682 for each of these registers for use in making the call. */
1683 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1684 if ((mode = apply_args_mode[regno]) != VOIDmode)
1686 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1687 if (size % align != 0)
1688 size = CEIL (size, align) * align;
1689 reg = gen_rtx_REG (mode, regno);
1690 emit_move_insn (reg, adjust_address (arguments, mode, size));
1691 use_reg (&call_fusage, reg);
1692 size += GET_MODE_SIZE (mode);
1695 /* Restore the structure value address unless this is passed as an
1696 "invisible" first argument. */
1697 size = GET_MODE_SIZE (Pmode);
1698 if (struct_value)
1700 rtx value = gen_reg_rtx (Pmode);
1701 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1702 emit_move_insn (struct_value, value);
1703 if (REG_P (struct_value))
1704 use_reg (&call_fusage, struct_value);
1705 size += GET_MODE_SIZE (Pmode);
1708 /* All arguments and registers used for the call are set up by now! */
1709 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1711 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1712 and we don't want to load it into a register as an optimization,
1713 because prepare_call_address already did it if it should be done. */
1714 if (GET_CODE (function) != SYMBOL_REF)
1715 function = memory_address (FUNCTION_MODE, function);
1717 /* Generate the actual call instruction and save the return value. */
1718 if (targetm.have_untyped_call ())
1720 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1721 emit_call_insn (targetm.gen_untyped_call (mem, result,
1722 result_vector (1, result)));
1724 else if (targetm.have_call_value ())
1726 rtx valreg = 0;
1728 /* Locate the unique return register. It is not possible to
1729 express a call that sets more than one return register using
1730 call_value; use untyped_call for that. In fact, untyped_call
1731 only needs to save the return registers in the given block. */
1732 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1733 if ((mode = apply_result_mode[regno]) != VOIDmode)
1735 gcc_assert (!valreg); /* have_untyped_call required. */
1737 valreg = gen_rtx_REG (mode, regno);
1740 emit_insn (targetm.gen_call_value (valreg,
1741 gen_rtx_MEM (FUNCTION_MODE, function),
1742 const0_rtx, NULL_RTX, const0_rtx));
1744 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1746 else
1747 gcc_unreachable ();
1749 /* Find the CALL insn we just emitted, and attach the register usage
1750 information. */
1751 call_insn = last_call_insn ();
1752 add_function_usage_to (call_insn, call_fusage);
1754 /* Restore the stack. */
1755 if (targetm.have_save_stack_nonlocal ())
1756 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1757 else
1758 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1759 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1761 OK_DEFER_POP;
1763 /* Return the address of the result block. */
1764 result = copy_addr_to_reg (XEXP (result, 0));
1765 return convert_memory_address (ptr_mode, result);
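/* A sketch of the classic forwarding idiom this expander implements,
   assuming FN and SIZE are supplied by the caller:

     void *ret = __builtin_apply ((void (*)()) fn,
                                  __builtin_apply_args (), size);

   RET then points to the block of saved return registers allocated
   above with assign_stack_local.  */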
1768 /* Perform an untyped return. */
1770 static void
1771 expand_builtin_return (rtx result)
1773 int size, align, regno;
1774 fixed_size_mode mode;
1775 rtx reg;
1776 rtx_insn *call_fusage = 0;
1778 result = convert_memory_address (Pmode, result);
1780 apply_result_size ();
1781 result = gen_rtx_MEM (BLKmode, result);
1783 if (targetm.have_untyped_return ())
1785 rtx vector = result_vector (0, result);
1786 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1787 emit_barrier ();
1788 return;
1791 /* Restore the return value and note that each value is used. */
1792 size = 0;
1793 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1794 if ((mode = apply_result_mode[regno]) != VOIDmode)
1796 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1797 if (size % align != 0)
1798 size = CEIL (size, align) * align;
1799 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1800 emit_move_insn (reg, adjust_address (result, mode, size));
1802 push_to_sequence (call_fusage);
1803 emit_use (reg);
1804 call_fusage = get_insns ();
1805 end_sequence ();
1806 size += GET_MODE_SIZE (mode);
1809 /* Put the USE insns before the return. */
1810 emit_insn (call_fusage);
1812 /* Return whatever value was restored by jumping directly to the end
1813 of the function. */
1814 expand_naked_return ();
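/* Continuing the forwarding sketch above, the saved block is handed
   back with

     __builtin_return (ret);

   which reloads the return registers and jumps straight to the end of
   the function via expand_naked_return.  */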
1817 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1819 static enum type_class
1820 type_to_class (tree type)
1822 switch (TREE_CODE (type))
1824 case VOID_TYPE: return void_type_class;
1825 case INTEGER_TYPE: return integer_type_class;
1826 case ENUMERAL_TYPE: return enumeral_type_class;
1827 case BOOLEAN_TYPE: return boolean_type_class;
1828 case POINTER_TYPE: return pointer_type_class;
1829 case REFERENCE_TYPE: return reference_type_class;
1830 case OFFSET_TYPE: return offset_type_class;
1831 case REAL_TYPE: return real_type_class;
1832 case COMPLEX_TYPE: return complex_type_class;
1833 case FUNCTION_TYPE: return function_type_class;
1834 case METHOD_TYPE: return method_type_class;
1835 case RECORD_TYPE: return record_type_class;
1836 case UNION_TYPE:
1837 case QUAL_UNION_TYPE: return union_type_class;
1838 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1839 ? string_type_class : array_type_class);
1840 case LANG_TYPE: return lang_type_class;
1841 default: return no_type_class;
1845 /* Expand a call EXP to __builtin_classify_type. */
1847 static rtx
1848 expand_builtin_classify_type (tree exp)
1850 if (call_expr_nargs (exp))
1851 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1852 return GEN_INT (no_type_class);
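/* For example, __builtin_classify_type (1) folds to the value of
   integer_type_class and __builtin_classify_type (1.0) to
   real_type_class, per the mapping in type_to_class above.  */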
1855 /* This helper macro, meant to be used in mathfn_built_in below, determines
1856 which among a set of builtin math functions is appropriate for a given type
1857 mode. The `F' (float) and `L' (long double) are automatically generated
1858 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1859 types, there are additional types that are considered with 'F32', 'F64',
1860 'F128', etc. suffixes. */
1861 #define CASE_MATHFN(MATHFN) \
1862 CASE_CFN_##MATHFN: \
1863 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1864 fcodel = BUILT_IN_##MATHFN##L ; break;
1865 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1866 types. */
1867 #define CASE_MATHFN_FLOATN(MATHFN) \
1868 CASE_CFN_##MATHFN: \
1869 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1870 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1871 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1872 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1873 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1874 break;
1875 /* Similar to above, but appends _R after any F/L suffix. */
1876 #define CASE_MATHFN_REENT(MATHFN) \
1877 case CFN_BUILT_IN_##MATHFN##_R: \
1878 case CFN_BUILT_IN_##MATHFN##F_R: \
1879 case CFN_BUILT_IN_##MATHFN##L_R: \
1880 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1881 fcodel = BUILT_IN_##MATHFN##L_R ; break;
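/* As an illustration, CASE_MATHFN (ACOS) expands to

     CASE_CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   where CASE_CFN_ACOS (from case-cfn-macros.h) supplies the case
   labels for the combined function codes of acos.  */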
1883 /* Return a function equivalent to FN but operating on floating-point
1884 values of type TYPE, or END_BUILTINS if no such function exists.
1885 This is purely an operation on function codes; it does not guarantee
1886 that the target actually has an implementation of the function. */
1888 static built_in_function
1889 mathfn_built_in_2 (tree type, combined_fn fn)
1891 tree mtype;
1892 built_in_function fcode, fcodef, fcodel;
1893 built_in_function fcodef16 = END_BUILTINS;
1894 built_in_function fcodef32 = END_BUILTINS;
1895 built_in_function fcodef64 = END_BUILTINS;
1896 built_in_function fcodef128 = END_BUILTINS;
1897 built_in_function fcodef32x = END_BUILTINS;
1898 built_in_function fcodef64x = END_BUILTINS;
1899 built_in_function fcodef128x = END_BUILTINS;
1901 switch (fn)
1903 CASE_MATHFN (ACOS)
1904 CASE_MATHFN (ACOSH)
1905 CASE_MATHFN (ASIN)
1906 CASE_MATHFN (ASINH)
1907 CASE_MATHFN (ATAN)
1908 CASE_MATHFN (ATAN2)
1909 CASE_MATHFN (ATANH)
1910 CASE_MATHFN (CBRT)
1911 CASE_MATHFN_FLOATN (CEIL)
1912 CASE_MATHFN (CEXPI)
1913 CASE_MATHFN_FLOATN (COPYSIGN)
1914 CASE_MATHFN (COS)
1915 CASE_MATHFN (COSH)
1916 CASE_MATHFN (DREM)
1917 CASE_MATHFN (ERF)
1918 CASE_MATHFN (ERFC)
1919 CASE_MATHFN (EXP)
1920 CASE_MATHFN (EXP10)
1921 CASE_MATHFN (EXP2)
1922 CASE_MATHFN (EXPM1)
1923 CASE_MATHFN (FABS)
1924 CASE_MATHFN (FDIM)
1925 CASE_MATHFN_FLOATN (FLOOR)
1926 CASE_MATHFN_FLOATN (FMA)
1927 CASE_MATHFN_FLOATN (FMAX)
1928 CASE_MATHFN_FLOATN (FMIN)
1929 CASE_MATHFN (FMOD)
1930 CASE_MATHFN (FREXP)
1931 CASE_MATHFN (GAMMA)
1932 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1933 CASE_MATHFN (HUGE_VAL)
1934 CASE_MATHFN (HYPOT)
1935 CASE_MATHFN (ILOGB)
1936 CASE_MATHFN (ICEIL)
1937 CASE_MATHFN (IFLOOR)
1938 CASE_MATHFN (INF)
1939 CASE_MATHFN (IRINT)
1940 CASE_MATHFN (IROUND)
1941 CASE_MATHFN (ISINF)
1942 CASE_MATHFN (J0)
1943 CASE_MATHFN (J1)
1944 CASE_MATHFN (JN)
1945 CASE_MATHFN (LCEIL)
1946 CASE_MATHFN (LDEXP)
1947 CASE_MATHFN (LFLOOR)
1948 CASE_MATHFN (LGAMMA)
1949 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1950 CASE_MATHFN (LLCEIL)
1951 CASE_MATHFN (LLFLOOR)
1952 CASE_MATHFN (LLRINT)
1953 CASE_MATHFN (LLROUND)
1954 CASE_MATHFN (LOG)
1955 CASE_MATHFN (LOG10)
1956 CASE_MATHFN (LOG1P)
1957 CASE_MATHFN (LOG2)
1958 CASE_MATHFN (LOGB)
1959 CASE_MATHFN (LRINT)
1960 CASE_MATHFN (LROUND)
1961 CASE_MATHFN (MODF)
1962 CASE_MATHFN (NAN)
1963 CASE_MATHFN (NANS)
1964 CASE_MATHFN_FLOATN (NEARBYINT)
1965 CASE_MATHFN (NEXTAFTER)
1966 CASE_MATHFN (NEXTTOWARD)
1967 CASE_MATHFN (POW)
1968 CASE_MATHFN (POWI)
1969 CASE_MATHFN (POW10)
1970 CASE_MATHFN (REMAINDER)
1971 CASE_MATHFN (REMQUO)
1972 CASE_MATHFN_FLOATN (RINT)
1973 CASE_MATHFN_FLOATN (ROUND)
1974 CASE_MATHFN (SCALB)
1975 CASE_MATHFN (SCALBLN)
1976 CASE_MATHFN (SCALBN)
1977 CASE_MATHFN (SIGNBIT)
1978 CASE_MATHFN (SIGNIFICAND)
1979 CASE_MATHFN (SIN)
1980 CASE_MATHFN (SINCOS)
1981 CASE_MATHFN (SINH)
1982 CASE_MATHFN_FLOATN (SQRT)
1983 CASE_MATHFN (TAN)
1984 CASE_MATHFN (TANH)
1985 CASE_MATHFN (TGAMMA)
1986 CASE_MATHFN_FLOATN (TRUNC)
1987 CASE_MATHFN (Y0)
1988 CASE_MATHFN (Y1)
1989 CASE_MATHFN (YN)
1991 default:
1992 return END_BUILTINS;
1995 mtype = TYPE_MAIN_VARIANT (type);
1996 if (mtype == double_type_node)
1997 return fcode;
1998 else if (mtype == float_type_node)
1999 return fcodef;
2000 else if (mtype == long_double_type_node)
2001 return fcodel;
2002 else if (mtype == float16_type_node)
2003 return fcodef16;
2004 else if (mtype == float32_type_node)
2005 return fcodef32;
2006 else if (mtype == float64_type_node)
2007 return fcodef64;
2008 else if (mtype == float128_type_node)
2009 return fcodef128;
2010 else if (mtype == float32x_type_node)
2011 return fcodef32x;
2012 else if (mtype == float64x_type_node)
2013 return fcodef64x;
2014 else if (mtype == float128x_type_node)
2015 return fcodef128x;
2016 else
2017 return END_BUILTINS;
2020 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2021 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2022 otherwise use the explicit declaration. If we can't do the conversion,
2023 return null. */
2025 static tree
2026 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2028 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2029 if (fcode2 == END_BUILTINS)
2030 return NULL_TREE;
2032 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2033 return NULL_TREE;
2035 return builtin_decl_explicit (fcode2);
2038 /* Like mathfn_built_in_1, but always use the implicit array. */
2040 tree
2041 mathfn_built_in (tree type, combined_fn fn)
2043 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2046 /* Like mathfn_built_in_1, but take a built_in_function and
2047 always use the implicit array. */
2049 tree
2050 mathfn_built_in (tree type, enum built_in_function fn)
2052 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
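/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) maps
   to BUILT_IN_SQRTF via mathfn_built_in_2 and returns its declaration,
   or NULL_TREE if sqrtf is not implicitly available.  */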
2055 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2056 return its code, otherwise return IFN_LAST. Note that this function
2057 only tests whether the function is defined in internal-fn.def, not whether
2058 it is actually available on the target. */
2060 internal_fn
2061 associated_internal_fn (tree fndecl)
2063 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2064 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2065 switch (DECL_FUNCTION_CODE (fndecl))
2067 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2068 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2069 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2070 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2071 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2072 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2073 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2074 #include "internal-fn.def"
2076 CASE_FLT_FN (BUILT_IN_POW10):
2077 return IFN_EXP10;
2079 CASE_FLT_FN (BUILT_IN_DREM):
2080 return IFN_REMAINDER;
2082 CASE_FLT_FN (BUILT_IN_SCALBN):
2083 CASE_FLT_FN (BUILT_IN_SCALBLN):
2084 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2085 return IFN_LDEXP;
2086 return IFN_LAST;
2088 default:
2089 return IFN_LAST;
2093 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2094 on the current target by a call to an internal function, return the
2095 code of that internal function, otherwise return IFN_LAST. The caller
2096 is responsible for ensuring that any side-effects of the built-in
2097 call are dealt with correctly. E.g. if CALL sets errno, the caller
2098 must decide that the errno result isn't needed or make it available
2099 in some other way. */
2101 internal_fn
2102 replacement_internal_fn (gcall *call)
2104 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2106 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2107 if (ifn != IFN_LAST)
2109 tree_pair types = direct_internal_fn_types (ifn, call);
2110 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2111 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2112 return ifn;
2115 return IFN_LAST;
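/* For example, a GIMPLE call to __builtin_sqrtf can be reported as
   replaceable by IFN_SQRT when the target supports a direct sqrt
   operation in SFmode; the caller must still deal with errno.  */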
2118 /* Expand a call to the builtin ternary math functions (fma).
2119 Return NULL_RTX if a normal call should be emitted rather than expanding the
2120 function in-line. EXP is the expression that is a call to the builtin
2121 function; if convenient, the result should be placed in TARGET.
2122 SUBTARGET may be used as the target for computing one of EXP's
2123 operands. */
2125 static rtx
2126 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2128 optab builtin_optab;
2129 rtx op0, op1, op2, result;
2130 rtx_insn *insns;
2131 tree fndecl = get_callee_fndecl (exp);
2132 tree arg0, arg1, arg2;
2133 machine_mode mode;
2135 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2136 return NULL_RTX;
2138 arg0 = CALL_EXPR_ARG (exp, 0);
2139 arg1 = CALL_EXPR_ARG (exp, 1);
2140 arg2 = CALL_EXPR_ARG (exp, 2);
2142 switch (DECL_FUNCTION_CODE (fndecl))
2144 CASE_FLT_FN (BUILT_IN_FMA):
2145 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2146 builtin_optab = fma_optab; break;
2147 default:
2148 gcc_unreachable ();
2151 /* Make a suitable register to place result in. */
2152 mode = TYPE_MODE (TREE_TYPE (exp));
2154 /* Before working hard, check whether the instruction is available. */
2155 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2156 return NULL_RTX;
2158 result = gen_reg_rtx (mode);
2160 /* Always stabilize the argument list. */
2161 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2162 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2163 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2165 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2166 op1 = expand_normal (arg1);
2167 op2 = expand_normal (arg2);
2169 start_sequence ();
2171 /* Compute into RESULT.
2172 Set RESULT to wherever the result comes back. */
2173 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2174 result, 0);
2176 /* If we were unable to expand via the builtin, stop the sequence
2177 (without outputting the insns) and emit a call to the library function
2178 with the stabilized argument list. */
2179 if (result == 0)
2181 end_sequence ();
2182 return expand_call (exp, target, target == const0_rtx);
2185 /* Output the entire sequence. */
2186 insns = get_insns ();
2187 end_sequence ();
2188 emit_insn (insns);
2190 return result;
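/* E.g. __builtin_fma (x, y, z) is expanded here through fma_optab into
   a single fused multiply-add insn when the target provides one;
   otherwise the sequence is abandoned and a normal fma call emitted.  */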
2193 /* Expand a call to the builtin sin and cos math functions.
2194 Return NULL_RTX if a normal call should be emitted rather than expanding the
2195 function in-line. EXP is the expression that is a call to the builtin
2196 function; if convenient, the result should be placed in TARGET.
2197 SUBTARGET may be used as the target for computing one of EXP's
2198 operands. */
2200 static rtx
2201 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2203 optab builtin_optab;
2204 rtx op0;
2205 rtx_insn *insns;
2206 tree fndecl = get_callee_fndecl (exp);
2207 machine_mode mode;
2208 tree arg;
2210 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2211 return NULL_RTX;
2213 arg = CALL_EXPR_ARG (exp, 0);
2215 switch (DECL_FUNCTION_CODE (fndecl))
2217 CASE_FLT_FN (BUILT_IN_SIN):
2218 CASE_FLT_FN (BUILT_IN_COS):
2219 builtin_optab = sincos_optab; break;
2220 default:
2221 gcc_unreachable ();
2224 /* Make a suitable register to place result in. */
2225 mode = TYPE_MODE (TREE_TYPE (exp));
2227 /* Check if the sincos insn is available, otherwise fall back
2228 to the sin or cos insn. */
2229 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2230 switch (DECL_FUNCTION_CODE (fndecl))
2232 CASE_FLT_FN (BUILT_IN_SIN):
2233 builtin_optab = sin_optab; break;
2234 CASE_FLT_FN (BUILT_IN_COS):
2235 builtin_optab = cos_optab; break;
2236 default:
2237 gcc_unreachable ();
2240 /* Before working hard, check whether the instruction is available. */
2241 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2243 rtx result = gen_reg_rtx (mode);
2245 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2246 need to expand the argument again. This way, we will not perform
2247 side-effects more than once. */
2248 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2250 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2252 start_sequence ();
2254 /* Compute into RESULT.
2255 Set RESULT to wherever the result comes back. */
2256 if (builtin_optab == sincos_optab)
2258 int ok;
2260 switch (DECL_FUNCTION_CODE (fndecl))
2262 CASE_FLT_FN (BUILT_IN_SIN):
2263 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2264 break;
2265 CASE_FLT_FN (BUILT_IN_COS):
2266 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2267 break;
2268 default:
2269 gcc_unreachable ();
2271 gcc_assert (ok);
2273 else
2274 result = expand_unop (mode, builtin_optab, op0, result, 0);
2276 if (result != 0)
2278 /* Output the entire sequence. */
2279 insns = get_insns ();
2280 end_sequence ();
2281 emit_insn (insns);
2282 return result;
2285 /* If we were unable to expand via the builtin, stop the sequence
2286 (without outputting the insns) and emit a call to the library function
2287 with the stabilized argument list. */
2288 end_sequence ();
2291 return expand_call (exp, target, target == const0_rtx);
2294 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2295 return an RTL instruction code that implements the functionality.
2296 If that isn't possible or available return CODE_FOR_nothing. */
2298 static enum insn_code
2299 interclass_mathfn_icode (tree arg, tree fndecl)
2301 bool errno_set = false;
2302 optab builtin_optab = unknown_optab;
2303 machine_mode mode;
2305 switch (DECL_FUNCTION_CODE (fndecl))
2307 CASE_FLT_FN (BUILT_IN_ILOGB):
2308 errno_set = true; builtin_optab = ilogb_optab; break;
2309 CASE_FLT_FN (BUILT_IN_ISINF):
2310 builtin_optab = isinf_optab; break;
2311 case BUILT_IN_ISNORMAL:
2312 case BUILT_IN_ISFINITE:
2313 CASE_FLT_FN (BUILT_IN_FINITE):
2314 case BUILT_IN_FINITED32:
2315 case BUILT_IN_FINITED64:
2316 case BUILT_IN_FINITED128:
2317 case BUILT_IN_ISINFD32:
2318 case BUILT_IN_ISINFD64:
2319 case BUILT_IN_ISINFD128:
2320 /* These builtins have no optabs (yet). */
2321 break;
2322 default:
2323 gcc_unreachable ();
2326 /* There's no easy way to detect the case we need to set EDOM. */
2327 if (flag_errno_math && errno_set)
2328 return CODE_FOR_nothing;
2330 /* Optab mode depends on the mode of the input argument. */
2331 mode = TYPE_MODE (TREE_TYPE (arg));
2333 if (builtin_optab)
2334 return optab_handler (builtin_optab, mode);
2335 return CODE_FOR_nothing;
2338 /* Expand a call to one of the builtin math functions that operate on
2340 a floating-point argument and output an integer result (ilogb, isinf,
2340 isnan, etc).
2341 Return 0 if a normal call should be emitted rather than expanding the
2342 function in-line. EXP is the expression that is a call to the builtin
2343 function; if convenient, the result should be placed in TARGET. */
2345 static rtx
2346 expand_builtin_interclass_mathfn (tree exp, rtx target)
2348 enum insn_code icode = CODE_FOR_nothing;
2349 rtx op0;
2350 tree fndecl = get_callee_fndecl (exp);
2351 machine_mode mode;
2352 tree arg;
2354 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2355 return NULL_RTX;
2357 arg = CALL_EXPR_ARG (exp, 0);
2358 icode = interclass_mathfn_icode (arg, fndecl);
2359 mode = TYPE_MODE (TREE_TYPE (arg));
2361 if (icode != CODE_FOR_nothing)
2363 struct expand_operand ops[1];
2364 rtx_insn *last = get_last_insn ();
2365 tree orig_arg = arg;
2367 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2368 need to expand the argument again. This way, we will not perform
2369 side-effects more than once. */
2370 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2372 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2374 if (mode != GET_MODE (op0))
2375 op0 = convert_to_mode (mode, op0, 0);
2377 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2378 if (maybe_legitimize_operands (icode, 0, 1, ops)
2379 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2380 return ops[0].value;
2382 delete_insns_since (last);
2383 CALL_EXPR_ARG (exp, 0) = orig_arg;
2386 return NULL_RTX;
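/* E.g. __builtin_isinf (x) is expanded here through isinf_optab when
   the target provides a matching insn; otherwise NULL_RTX is returned
   and the call is emitted normally (or folded elsewhere).  */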
2389 /* Expand a call to the builtin sincos math function.
2390 Return NULL_RTX if a normal call should be emitted rather than expanding the
2391 function in-line. EXP is the expression that is a call to the builtin
2392 function. */
2394 static rtx
2395 expand_builtin_sincos (tree exp)
2397 rtx op0, op1, op2, target1, target2;
2398 machine_mode mode;
2399 tree arg, sinp, cosp;
2400 int result;
2401 location_t loc = EXPR_LOCATION (exp);
2402 tree alias_type, alias_off;
2404 if (!validate_arglist (exp, REAL_TYPE,
2405 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2406 return NULL_RTX;
2408 arg = CALL_EXPR_ARG (exp, 0);
2409 sinp = CALL_EXPR_ARG (exp, 1);
2410 cosp = CALL_EXPR_ARG (exp, 2);
2412 /* Make a suitable register to place result in. */
2413 mode = TYPE_MODE (TREE_TYPE (arg));
2415 /* Check if sincos insn is available, otherwise emit the call. */
2416 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2417 return NULL_RTX;
2419 target1 = gen_reg_rtx (mode);
2420 target2 = gen_reg_rtx (mode);
2422 op0 = expand_normal (arg);
2423 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2424 alias_off = build_int_cst (alias_type, 0);
2425 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2426 sinp, alias_off));
2427 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2428 cosp, alias_off));
2430 /* Compute into target1 and target2.
2431 Set TARGET to wherever the result comes back. */
2432 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2433 gcc_assert (result);
2435 /* Move target1 and target2 to the memory locations indicated
2436 by op1 and op2. */
2437 emit_move_insn (op1, target1);
2438 emit_move_insn (op2, target2);
2440 return const0_rtx;
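/* E.g. sincos (x, &s, &c) is expanded into one sincos insn computing
   both values, whose results are then stored through the two pointer
   arguments by the moves above.  */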
2443 /* Expand a call to the internal cexpi builtin to the sincos math function.
2444 EXP is the expression that is a call to the builtin function; if convenient,
2445 the result should be placed in TARGET. */
2447 static rtx
2448 expand_builtin_cexpi (tree exp, rtx target)
2450 tree fndecl = get_callee_fndecl (exp);
2451 tree arg, type;
2452 machine_mode mode;
2453 rtx op0, op1, op2;
2454 location_t loc = EXPR_LOCATION (exp);
2456 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2457 return NULL_RTX;
2459 arg = CALL_EXPR_ARG (exp, 0);
2460 type = TREE_TYPE (arg);
2461 mode = TYPE_MODE (TREE_TYPE (arg));
2463 /* Try expanding via a sincos optab, fall back to emitting a libcall
2464 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2465 is only generated from sincos or cexp, or when either of them is available. */
2466 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2468 op1 = gen_reg_rtx (mode);
2469 op2 = gen_reg_rtx (mode);
2471 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2473 /* Compute into op1 and op2. */
2474 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2476 else if (targetm.libc_has_function (function_sincos))
2478 tree call, fn = NULL_TREE;
2479 tree top1, top2;
2480 rtx op1a, op2a;
2482 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2483 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2485 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2487 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2488 else
2489 gcc_unreachable ();
2491 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2492 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2493 op1a = copy_addr_to_reg (XEXP (op1, 0));
2494 op2a = copy_addr_to_reg (XEXP (op2, 0));
2495 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2496 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2498 /* Make sure not to fold the sincos call again. */
2499 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2500 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2501 call, 3, arg, top1, top2));
2503 else
2505 tree call, fn = NULL_TREE, narg;
2506 tree ctype = build_complex_type (type);
2508 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2509 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2510 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2511 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2512 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2513 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2514 else
2515 gcc_unreachable ();
2517 /* If we don't have a decl for cexp, create one. This is the
2518 friendliest fallback if the user calls __builtin_cexpi
2519 without full C99 function support on the target. */
2520 if (fn == NULL_TREE)
2522 tree fntype;
2523 const char *name = NULL;
2525 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2526 name = "cexpf";
2527 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2528 name = "cexp";
2529 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2530 name = "cexpl";
2532 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2533 fn = build_fn_decl (name, fntype);
2536 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2537 build_real (type, dconst0), arg);
2539 /* Make sure not to fold the cexp call again. */
2540 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2541 return expand_expr (build_call_nary (ctype, call, 1, narg),
2542 target, VOIDmode, EXPAND_NORMAL);
2545 /* Now build the proper return type. */
2546 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2547 make_tree (TREE_TYPE (arg), op2),
2548 make_tree (TREE_TYPE (arg), op1)),
2549 target, VOIDmode, EXPAND_NORMAL);
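/* To summarize the strategies above: __builtin_cexpi (x) computes
   cos (x) + i*sin (x), so it is lowered to a sincos insn, to a
   sincos (x, &sin_res, &cos_res) libcall, or to cexp (0.0 + x*i),
   whichever is available.  */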
2552 /* Conveniently construct a function call expression. FNDECL names the
2553 function to be called, N is the number of arguments, and the "..."
2554 parameters are the argument expressions. Unlike build_call_expr,
2555 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2557 static tree
2558 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2560 va_list ap;
2561 tree fntype = TREE_TYPE (fndecl);
2562 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2564 va_start (ap, n);
2565 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2566 va_end (ap);
2567 SET_EXPR_LOCATION (fn, loc);
2568 return fn;
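/* A typical use appears in expand_builtin_int_roundingfn below:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                  fallback_fndecl, 1, arg);

   which yields an unfolded CALL_EXPR invoking FALLBACK_FNDECL on ARG.  */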
2571 /* Expand a call to one of the builtin rounding functions gcc defines
2572 as an extension (lfloor and lceil). As these are gcc extensions, we
2573 do not need to worry about setting errno to EDOM.
2574 If expanding via optab fails, lower expression to (int)(floor(x)).
2575 EXP is the expression that is a call to the builtin function;
2576 if convenient, the result should be placed in TARGET. */
2578 static rtx
2579 expand_builtin_int_roundingfn (tree exp, rtx target)
2581 convert_optab builtin_optab;
2582 rtx op0, tmp;
2583 rtx_insn *insns;
2584 tree fndecl = get_callee_fndecl (exp);
2585 enum built_in_function fallback_fn;
2586 tree fallback_fndecl;
2587 machine_mode mode;
2588 tree arg;
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 gcc_unreachable ();
2593 arg = CALL_EXPR_ARG (exp, 0);
2595 switch (DECL_FUNCTION_CODE (fndecl))
2597 CASE_FLT_FN (BUILT_IN_ICEIL):
2598 CASE_FLT_FN (BUILT_IN_LCEIL):
2599 CASE_FLT_FN (BUILT_IN_LLCEIL):
2600 builtin_optab = lceil_optab;
2601 fallback_fn = BUILT_IN_CEIL;
2602 break;
2604 CASE_FLT_FN (BUILT_IN_IFLOOR):
2605 CASE_FLT_FN (BUILT_IN_LFLOOR):
2606 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2607 builtin_optab = lfloor_optab;
2608 fallback_fn = BUILT_IN_FLOOR;
2609 break;
2611 default:
2612 gcc_unreachable ();
2615 /* Make a suitable register to place result in. */
2616 mode = TYPE_MODE (TREE_TYPE (exp));
2618 target = gen_reg_rtx (mode);
2620 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2621 need to expand the argument again. This way, we will not perform
2622 side-effects more than once. */
2623 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2625 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2627 start_sequence ();
2629 /* Compute into TARGET. */
2630 if (expand_sfix_optab (target, op0, builtin_optab))
2632 /* Output the entire sequence. */
2633 insns = get_insns ();
2634 end_sequence ();
2635 emit_insn (insns);
2636 return target;
2639 /* If we were unable to expand via the builtin, stop the sequence
2640 (without outputting the insns). */
2641 end_sequence ();
2643 /* Fall back to floating point rounding optab. */
2644 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2646 /* For non-C99 targets we may end up without a fallback fndecl here
2647 if the user called __builtin_lfloor directly. In this case emit
2648 a call to the floor/ceil variants nevertheless. This should result
2649 in the best user experience on targets lacking full C99 support. */
2650 if (fallback_fndecl == NULL_TREE)
2652 tree fntype;
2653 const char *name = NULL;
2655 switch (DECL_FUNCTION_CODE (fndecl))
2657 case BUILT_IN_ICEIL:
2658 case BUILT_IN_LCEIL:
2659 case BUILT_IN_LLCEIL:
2660 name = "ceil";
2661 break;
2662 case BUILT_IN_ICEILF:
2663 case BUILT_IN_LCEILF:
2664 case BUILT_IN_LLCEILF:
2665 name = "ceilf";
2666 break;
2667 case BUILT_IN_ICEILL:
2668 case BUILT_IN_LCEILL:
2669 case BUILT_IN_LLCEILL:
2670 name = "ceill";
2671 break;
2672 case BUILT_IN_IFLOOR:
2673 case BUILT_IN_LFLOOR:
2674 case BUILT_IN_LLFLOOR:
2675 name = "floor";
2676 break;
2677 case BUILT_IN_IFLOORF:
2678 case BUILT_IN_LFLOORF:
2679 case BUILT_IN_LLFLOORF:
2680 name = "floorf";
2681 break;
2682 case BUILT_IN_IFLOORL:
2683 case BUILT_IN_LFLOORL:
2684 case BUILT_IN_LLFLOORL:
2685 name = "floorl";
2686 break;
2687 default:
2688 gcc_unreachable ();
2691 fntype = build_function_type_list (TREE_TYPE (arg),
2692 TREE_TYPE (arg), NULL_TREE);
2693 fallback_fndecl = build_fn_decl (name, fntype);
2696 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2698 tmp = expand_normal (exp);
2699 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2701 /* Truncate the result of floating point optab to integer
2702 via expand_fix (). */
2703 target = gen_reg_rtx (mode);
2704 expand_fix (target, tmp, 0);
2706 return target;
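/* E.g. __builtin_lfloor (x) is expanded via lfloor_optab when the
   target supports it directly, and otherwise lowered as sketched in
   the comment above to the equivalent of (long) floor (x): a call to
   floor followed by the expand_fix truncation.  */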
2709 /* Expand a call to one of the builtin math functions doing integer
2710 conversion (lrint).
2711 Return 0 if a normal call should be emitted rather than expanding the
2712 function in-line. EXP is the expression that is a call to the builtin
2713 function; if convenient, the result should be placed in TARGET. */
2715 static rtx
2716 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2718 convert_optab builtin_optab;
2719 rtx op0;
2720 rtx_insn *insns;
2721 tree fndecl = get_callee_fndecl (exp);
2722 tree arg;
2723 machine_mode mode;
2724 enum built_in_function fallback_fn = BUILT_IN_NONE;
2726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2727 gcc_unreachable ();
2729 arg = CALL_EXPR_ARG (exp, 0);
2731 switch (DECL_FUNCTION_CODE (fndecl))
2733 CASE_FLT_FN (BUILT_IN_IRINT):
2734 fallback_fn = BUILT_IN_LRINT;
2735 gcc_fallthrough ();
2736 CASE_FLT_FN (BUILT_IN_LRINT):
2737 CASE_FLT_FN (BUILT_IN_LLRINT):
2738 builtin_optab = lrint_optab;
2739 break;
2741 CASE_FLT_FN (BUILT_IN_IROUND):
2742 fallback_fn = BUILT_IN_LROUND;
2743 gcc_fallthrough ();
2744 CASE_FLT_FN (BUILT_IN_LROUND):
2745 CASE_FLT_FN (BUILT_IN_LLROUND):
2746 builtin_optab = lround_optab;
2747 break;
2749 default:
2750 gcc_unreachable ();
2753 /* There's no easy way to detect the case we need to set EDOM. */
2754 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2755 return NULL_RTX;
2757 /* Make a suitable register to place result in. */
2758 mode = TYPE_MODE (TREE_TYPE (exp));
2760 /* There's no easy way to detect the case we need to set EDOM. */
2761 if (!flag_errno_math)
2763 rtx result = gen_reg_rtx (mode);
2765 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2766 need to expand the argument again. This way, we will not perform
2767 side-effects more than once. */
2768 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2770 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2772 start_sequence ();
2774 if (expand_sfix_optab (result, op0, builtin_optab))
2776 /* Output the entire sequence. */
2777 insns = get_insns ();
2778 end_sequence ();
2779 emit_insn (insns);
2780 return result;
2783 /* If we were unable to expand via the builtin, stop the sequence
2784 (without outputting the insns) and emit a call to the library function
2785 with the stabilized argument list. */
2786 end_sequence ();
2789 if (fallback_fn != BUILT_IN_NONE)
2791 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2792 targets, (int) round (x) should never be transformed into
2793 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2794 a call to lround in the hope that the target provides at least some
2795 C99 functions. This should result in the best user experience for
2796 targets lacking full C99 support. */
2797 tree fallback_fndecl = mathfn_built_in_1
2798 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2800 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2801 fallback_fndecl, 1, arg);
2803 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2804 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2805 return convert_to_mode (mode, target, 0);
2808 return expand_call (exp, target, target == const0_rtx);
2811 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2812 a normal call should be emitted rather than expanding the function
2813 in-line. EXP is the expression that is a call to the builtin
2814 function; if convenient, the result should be placed in TARGET. */
2816 static rtx
2817 expand_builtin_powi (tree exp, rtx target)
2819 tree arg0, arg1;
2820 rtx op0, op1;
2821 machine_mode mode;
2822 machine_mode mode2;
2824 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2825 return NULL_RTX;
2827 arg0 = CALL_EXPR_ARG (exp, 0);
2828 arg1 = CALL_EXPR_ARG (exp, 1);
2829 mode = TYPE_MODE (TREE_TYPE (exp));
2831 /* Emit a libcall to libgcc. */
2833 /* Mode of the 2nd argument must match that of an int. */
2834 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2836 if (target == NULL_RTX)
2837 target = gen_reg_rtx (mode);
2839 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2840 if (GET_MODE (op0) != mode)
2841 op0 = convert_to_mode (mode, op0, 0);
2842 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2843 if (GET_MODE (op1) != mode2)
2844 op1 = convert_to_mode (mode2, op1, 0);
2846 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2847 target, LCT_CONST, mode,
2848 op0, mode, op1, mode2);
2850 return target;
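/* E.g. for double arguments this emits a call to the powi libfunc for
   DFmode (typically __powidf2 in libgcc), with the exponent passed in
   an int-sized mode.  */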
2853 /* Expand expression EXP which is a call to the strlen builtin. Return
2854 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2855 try to get the result in TARGET, if convenient. */
2857 static rtx
2858 expand_builtin_strlen (tree exp, rtx target,
2859 machine_mode target_mode)
2861 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2862 return NULL_RTX;
2864 struct expand_operand ops[4];
2865 rtx pat;
2866 tree len;
2867 tree src = CALL_EXPR_ARG (exp, 0);
2868 rtx src_reg;
2869 rtx_insn *before_strlen;
2870 machine_mode insn_mode;
2871 enum insn_code icode = CODE_FOR_nothing;
2872 unsigned int align;
2874 /* If the length can be computed at compile-time, return it. */
2875 len = c_strlen (src, 0);
2876 if (len)
2877 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2879 /* If the length can be computed at compile-time and is a constant
2880 integer, but there are side-effects in src, evaluate
2881 src for side-effects, then return len.
2882 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2883 can be optimized into: i++; x = 3; */
2884 len = c_strlen (src, 1);
2885 if (len && TREE_CODE (len) == INTEGER_CST)
2887 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2888 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2891 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2893 /* If SRC is not a pointer type, don't do this operation inline. */
2894 if (align == 0)
2895 return NULL_RTX;
2897 /* Bail out if we can't compute strlen in the right mode. */
2898 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2900 icode = optab_handler (strlen_optab, insn_mode);
2901 if (icode != CODE_FOR_nothing)
2902 break;
2904 if (insn_mode == VOIDmode)
2905 return NULL_RTX;
2907 /* Make a place to hold the source address. We will not expand
2908 the actual source until we are sure that the expansion will
2909 not fail -- there are trees that cannot be expanded twice. */
2910 src_reg = gen_reg_rtx (Pmode);
2912 /* Mark the beginning of the strlen sequence so we can emit the
2913 source operand later. */
2914 before_strlen = get_last_insn ();
2916 create_output_operand (&ops[0], target, insn_mode);
2917 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2918 create_integer_operand (&ops[2], 0);
2919 create_integer_operand (&ops[3], align);
2920 if (!maybe_expand_insn (icode, 4, ops))
2921 return NULL_RTX;
2923 /* Check to see if the argument was declared attribute nonstring
2924 and if so, issue a warning since at this point it's not known
2925 to be nul-terminated. */
2926 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2928 /* Now that we are assured of success, expand the source. */
2929 start_sequence ();
2930 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2931 if (pat != src_reg)
2933 #ifdef POINTERS_EXTEND_UNSIGNED
2934 if (GET_MODE (pat) != Pmode)
2935 pat = convert_to_mode (Pmode, pat,
2936 POINTERS_EXTEND_UNSIGNED);
2937 #endif
2938 emit_move_insn (src_reg, pat);
2940 pat = get_insns ();
2941 end_sequence ();
2943 if (before_strlen)
2944 emit_insn_after (pat, before_strlen);
2945 else
2946 emit_insn_before (pat, get_insns ());
2948 /* Return the value in the proper mode for this function. */
2949 if (GET_MODE (ops[0].value) == target_mode)
2950 target = ops[0].value;
2951 else if (target != 0)
2952 convert_move (target, ops[0].value, 0);
2953 else
2954 target = convert_to_mode (target_mode, ops[0].value, 0);
2956 return target;
2959 /* Expand call EXP to the strnlen built-in, returning the result
2960 in TARGET if convenient, or NULL_RTX on failure. */
2962 static rtx
2963 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2965 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2966 return NULL_RTX;
2968 tree src = CALL_EXPR_ARG (exp, 0);
2969 tree bound = CALL_EXPR_ARG (exp, 1);
2971 if (!bound)
2972 return NULL_RTX;
2974 location_t loc = UNKNOWN_LOCATION;
2975 if (EXPR_HAS_LOCATION (exp))
2976 loc = EXPR_LOCATION (exp);
2978 tree maxobjsize = max_object_size ();
2979 tree func = get_callee_fndecl (exp);
2981 tree len = c_strlen (src, 0);
2983 if (TREE_CODE (bound) == INTEGER_CST)
2985 if (!TREE_NO_WARNING (exp)
2986 && tree_int_cst_lt (maxobjsize, bound)
2987 && warning_at (loc, OPT_Wstringop_overflow_,
2988 "%K%qD specified bound %E "
2989 "exceeds maximum object size %E",
2990 exp, func, bound, maxobjsize))
2991 TREE_NO_WARNING (exp) = true;
2993 if (!len || TREE_CODE (len) != INTEGER_CST)
2994 return NULL_RTX;
2996 len = fold_convert_loc (loc, size_type_node, len);
2997 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2998 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3001 if (TREE_CODE (bound) != SSA_NAME)
3002 return NULL_RTX;
3004 wide_int min, max;
3005 enum value_range_type rng = get_range_info (bound, &min, &max);
3006 if (rng != VR_RANGE)
3007 return NULL_RTX;
3009 if (!TREE_NO_WARNING (exp)
3010 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3011 && warning_at (loc, OPT_Wstringop_overflow_,
3012 "%K%qD specified bound [%wu, %wu] "
3013 "exceeds maximum object size %E",
3014 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3015 TREE_NO_WARNING (exp) = true;
3017 if (!len || TREE_CODE (len) != INTEGER_CST)
3018 return NULL_RTX;
3020 if (wi::gtu_p (min, wi::to_wide (len)))
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3023 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3024 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
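/* For example, strnlen ("hello", 3) has a constant source length of 5
   and a constant bound, so it folds above to MIN (5, 3) and expands to
   the constant 3.  */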
3027 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3028 bytes from constant string DATA + OFFSET and return it as target
3029 constant. */
3031 static rtx
3032 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3033 scalar_int_mode mode)
3035 const char *str = (const char *) data;
3037 gcc_assert (offset >= 0
3038 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3039 <= strlen (str) + 1));
3041 return c_readstr (str + offset, mode);
3044 /* LEN specifies the length of the block for a memcpy/memset operation.
3045 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3046 In some cases we can make a very likely guess about the max size, which
3047 we then put into PROBABLE_MAX_SIZE. */
3049 static void
3050 determine_block_size (tree len, rtx len_rtx,
3051 unsigned HOST_WIDE_INT *min_size,
3052 unsigned HOST_WIDE_INT *max_size,
3053 unsigned HOST_WIDE_INT *probable_max_size)
3055 if (CONST_INT_P (len_rtx))
3057 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3058 return;
3060 else
3062 wide_int min, max;
3063 enum value_range_type range_type = VR_UNDEFINED;
3065 /* Determine bounds from the type. */
3066 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3067 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3068 else
3069 *min_size = 0;
3070 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3071 *probable_max_size = *max_size
3072 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3073 else
3074 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3076 if (TREE_CODE (len) == SSA_NAME)
3077 range_type = get_range_info (len, &min, &max);
3078 if (range_type == VR_RANGE)
3080 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3081 *min_size = min.to_uhwi ();
3082 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3083 *probable_max_size = *max_size = max.to_uhwi ();
3085 else if (range_type == VR_ANTI_RANGE)
3087 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3088 if (min == 0)
3090 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3091 *min_size = max.to_uhwi () + 1;
3093 /* Code like
3095 int n;
3096 if (n < 100)
3097 memcpy (a, b, n)
3099 produces an anti-range allowing negative values of N. We can
3100 still use the information and guess that N is not negative. */
3102 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3103 *probable_max_size = min.to_uhwi () - 1;
3106 gcc_checking_assert (*max_size <=
3107 (unsigned HOST_WIDE_INT)
3108 GET_MODE_MASK (GET_MODE (len_rtx)));
3111 /* Try to verify that the sizes and lengths of the arguments to a string
3112 manipulation function given by EXP are within valid bounds and that
3113 the operation does not lead to buffer overflow or read past the end.
3114 Arguments other than EXP may be null. When non-null, the arguments
3115 have the following meaning:
3116 DST is the destination of a copy call or NULL otherwise.
3117 SRC is the source of a copy call or NULL otherwise.
3118 DSTWRITE is the number of bytes written into the destination obtained
3119 from the user-supplied size argument to the function (such as in
3120 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3121 MAXREAD is the user-supplied bound on the length of the source sequence
3122 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3123 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3124 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3125 expression EXP is a string function call (as opposed to a memory call
3126 like memcpy). As an exception, SRCSTR can also be an integer denoting
3127 the precomputed size of the source string or object (for functions like
3128 memcpy).
3129 DSTSIZE is the size of the destination object specified by the last
3130 argument to the _chk builtins, typically resulting from the expansion
3131 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3132 DSTSIZE).
3134 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3135 SIZE_MAX.
3137 If the call is successfully verified as safe return true, otherwise
3138 return false. */
3140 static bool
3141 check_access (tree exp, tree, tree, tree dstwrite,
3142 tree maxread, tree srcstr, tree dstsize)
3144 int opt = OPT_Wstringop_overflow_;
3146 /* The size of the largest object is half the address space, or
3147 PTRDIFF_MAX. (This is way too permissive.) */
3148 tree maxobjsize = max_object_size ();
3150 /* Either the length of the source string for string functions or
3151 the size of the source object for raw memory functions. */
3152 tree slen = NULL_TREE;
3154 tree range[2] = { NULL_TREE, NULL_TREE };
3156 /* Set to true when the exact number of bytes written by a string
3157 function like strcpy is not known and the only thing that is
3158 known is that it must be at least one (for the terminating nul). */
3159 bool at_least_one = false;
3160 if (srcstr)
3162 /* SRCSTR is normally a pointer to string but as a special case
3163 it can be an integer denoting the length of a string. */
3164 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3166 /* Try to determine the range of lengths the source string
3167 refers to. If it can be determined and is less than
3168 the upper bound given by MAXREAD add one to it for
3169 the terminating nul. Otherwise, set it to one for
3170 the same reason, or to MAXREAD as appropriate. */
3171 get_range_strlen (srcstr, range);
3172 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3174 if (maxread && tree_int_cst_le (maxread, range[0]))
3175 range[0] = range[1] = maxread;
3176 else
3177 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3178 range[0], size_one_node);
3180 if (maxread && tree_int_cst_le (maxread, range[1]))
3181 range[1] = maxread;
3182 else if (!integer_all_onesp (range[1]))
3183 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3184 range[1], size_one_node);
3186 slen = range[0];
3188 else
3190 at_least_one = true;
3191 slen = size_one_node;
3194 else
3195 slen = srcstr;
3198 if (!dstwrite && !maxread)
3200 /* When the only available piece of data is the object size
3201 there is nothing to do. */
3202 if (!slen)
3203 return true;
3205 /* Otherwise, when the length of the source sequence is known
3206 (as with strlen), set DSTWRITE to it. */
3207 if (!range[0])
3208 dstwrite = slen;
3211 if (!dstsize)
3212 dstsize = maxobjsize;
3214 if (dstwrite)
3215 get_size_range (dstwrite, range);
3217 tree func = get_callee_fndecl (exp);
3219 /* First check the number of bytes to be written against the maximum
3220 object size. */
3221 if (range[0]
3222 && TREE_CODE (range[0]) == INTEGER_CST
3223 && tree_int_cst_lt (maxobjsize, range[0]))
3225 if (TREE_NO_WARNING (exp))
3226 return false;
3228 location_t loc = tree_nonartificial_location (exp);
3229 loc = expansion_point_location_if_in_system_header (loc);
3231 bool warned;
3232 if (range[0] == range[1])
3233 warned = warning_at (loc, opt,
3234 "%K%qD specified size %E "
3235 "exceeds maximum object size %E",
3236 exp, func, range[0], maxobjsize);
3237 else
3238 warned = warning_at (loc, opt,
3239 "%K%qD specified size between %E and %E "
3240 "exceeds maximum object size %E",
3241 exp, func,
3242 range[0], range[1], maxobjsize);
3243 if (warned)
3244 TREE_NO_WARNING (exp) = true;
3246 return false;
3249 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3250 constant, and in range of unsigned HOST_WIDE_INT. */
3251 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3253 /* Next check the number of bytes to be written against the destination
3254 object size. */
3255 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3257 if (range[0]
3258 && TREE_CODE (range[0]) == INTEGER_CST
3259 && ((tree_fits_uhwi_p (dstsize)
3260 && tree_int_cst_lt (dstsize, range[0]))
3261 || (dstwrite
3262 && tree_fits_uhwi_p (dstwrite)
3263 && tree_int_cst_lt (dstwrite, range[0]))))
3265 if (TREE_NO_WARNING (exp))
3266 return false;
3268 location_t loc = tree_nonartificial_location (exp);
3269 loc = expansion_point_location_if_in_system_header (loc);
3271 if (dstwrite == slen && at_least_one)
3273 /* This is a call to strcpy with a destination of 0 size
3274 and a source of unknown length. The call will write
3275 at least one byte past the end of the destination. */
3276 warning_at (loc, opt,
3277 "%K%qD writing %E or more bytes into a region "
3278 "of size %E overflows the destination",
3279 exp, func, range[0], dstsize);
3281 else if (tree_int_cst_equal (range[0], range[1]))
3282 warning_n (loc, opt, tree_to_uhwi (range[0]),
3283 "%K%qD writing %E byte into a region "
3284 "of size %E overflows the destination",
3285 "%K%qD writing %E bytes into a region "
3286 "of size %E overflows the destination",
3287 exp, func, range[0], dstsize);
3288 else if (tree_int_cst_sign_bit (range[1]))
3290 /* Avoid printing the upper bound if it's invalid. */
3291 warning_at (loc, opt,
3292 "%K%qD writing %E or more bytes into a region "
3293 "of size %E overflows the destination",
3294 exp, func, range[0], dstsize);
3296 else
3297 warning_at (loc, opt,
3298 "%K%qD writing between %E and %E bytes into "
3299 "a region of size %E overflows the destination",
3300 exp, func, range[0], range[1],
3301 dstsize);
3303 /* Return error when an overflow has been detected. */
3304 return false;
3308 /* Check the maximum length of the source sequence against the size
3309 of the destination object if known, or against the maximum size
3310 of an object. */
3311 if (maxread)
3313 get_size_range (maxread, range);
3315 /* Use the lower end for MAXREAD from now on. */
3316 if (range[0])
3317 maxread = range[0];
3319 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3321 location_t loc = tree_nonartificial_location (exp);
3322 loc = expansion_point_location_if_in_system_header (loc);
3324 if (tree_int_cst_lt (maxobjsize, range[0]))
3326 if (TREE_NO_WARNING (exp))
3327 return false;
3329 /* Warn about crazy big sizes first since that's more
3330 likely to be meaningful than saying that the bound
3331 is greater than the object size if both are big. */
3332 if (range[0] == range[1])
3333 warning_at (loc, opt,
3334 "%K%qD specified bound %E "
3335 "exceeds maximum object size %E",
3336 exp, func,
3337 range[0], maxobjsize);
3338 else
3339 warning_at (loc, opt,
3340 "%K%qD specified bound between %E and %E "
3341 "exceeds maximum object size %E",
3342 exp, func,
3343 range[0], range[1], maxobjsize);
3345 return false;
3348 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3350 if (TREE_NO_WARNING (exp))
3351 return false;
3353 if (tree_int_cst_equal (range[0], range[1]))
3354 warning_at (loc, opt,
3355 "%K%qD specified bound %E "
3356 "exceeds destination size %E",
3357 exp, func,
3358 range[0], dstsize);
3359 else
3360 warning_at (loc, opt,
3361 "%K%qD specified bound between %E and %E "
3362 "exceeds destination size %E",
3363 exp, func,
3364 range[0], range[1], dstsize);
3365 return false;
3370 /* Check for reading past the end of SRC. */
3371 if (slen
3372 && slen == srcstr
3373 && dstwrite && range[0]
3374 && tree_int_cst_lt (slen, range[0]))
3376 if (TREE_NO_WARNING (exp))
3377 return false;
3379 location_t loc = tree_nonartificial_location (exp);
3381 if (tree_int_cst_equal (range[0], range[1]))
3382 warning_n (loc, opt, tree_to_uhwi (range[0]),
3383 "%K%qD reading %E byte from a region of size %E",
3384 "%K%qD reading %E bytes from a region of size %E",
3385 exp, func, range[0], slen);
3386 else if (tree_int_cst_sign_bit (range[1]))
3388 /* Avoid printing the upper bound if it's invalid. */
3389 warning_at (loc, opt,
3390 "%K%qD reading %E or more bytes from a region "
3391 "of size %E",
3392 exp, func, range[0], slen);
3394 else
3395 warning_at (loc, opt,
3396 "%K%qD reading between %E and %E bytes from a region "
3397 "of size %E",
3398 exp, func, range[0], range[1], slen);
3399 return false;
3402 return true;
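/* For instance, for a strcpy caller with

     char d[3];
     strcpy (d, "abcd");

   the source length range becomes [5, 5] (the string plus its
   terminating nul), DSTSIZE is 3, and the destination-size check above
   emits the "writing 5 bytes into a region of size 3" warning and
   returns false.  */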
3405 /* Helper to compute the size of the object referenced by the DEST
3406 expression which must have pointer type, using Object Size type
3407 OSTYPE (only the least significant 2 bits are used). Return
3408 an estimate of the size of the object if successful or NULL when
3409 the size cannot be determined. When the referenced object involves
3410 a non-constant offset in some range the returned value represents
3411 the largest size given the smallest non-negative offset in the
3412 range. The function is intended for diagnostics and should not
3413 be used to influence code generation or optimization. */
3415 tree
3416 compute_objsize (tree dest, int ostype)
3418 unsigned HOST_WIDE_INT size;
3420 /* Only the two least significant bits are meaningful. */
3421 ostype &= 3;
3423 if (compute_builtin_object_size (dest, ostype, &size))
3424 return build_int_cst (sizetype, size);
3426 if (TREE_CODE (dest) == SSA_NAME)
3428 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3429 if (!is_gimple_assign (stmt))
3430 return NULL_TREE;
3432 dest = gimple_assign_rhs1 (stmt);
3434 tree_code code = gimple_assign_rhs_code (stmt);
3435 if (code == POINTER_PLUS_EXPR)
3437 /* compute_builtin_object_size fails for addresses with
3438 non-constant offsets. Try to determine the range of
3439 such an offset here and use it to adjust the constant
3440 size. */
3441 tree off = gimple_assign_rhs2 (stmt);
3442 if (TREE_CODE (off) == INTEGER_CST)
3444 if (tree size = compute_objsize (dest, ostype))
3446 wide_int wioff = wi::to_wide (off);
3447 wide_int wisiz = wi::to_wide (size);
3449 /* Ignore negative offsets for now. For others,
3450 use the lower bound as the most optimistic
3451 estimate of the (remaining) size. */
3452 if (wi::sign_mask (wioff))
3454 else if (wi::ltu_p (wioff, wisiz))
3455 return wide_int_to_tree (TREE_TYPE (size),
3456 wi::sub (wisiz, wioff));
3457 else
3458 return size_zero_node;
3461 else if (TREE_CODE (off) == SSA_NAME
3462 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3464 wide_int min, max;
3465 enum value_range_type rng = get_range_info (off, &min, &max);
3467 if (rng == VR_RANGE)
3469 if (tree size = compute_objsize (dest, ostype))
3471 wide_int wisiz = wi::to_wide (size);
3473 /* Ignore negative offsets for now. For others,
3474 use the lower bound as the most optimistic
3475 estimate of the (remaining) size. */
3476 if (wi::sign_mask (min))
3478 else if (wi::ltu_p (min, wisiz))
3479 return wide_int_to_tree (TREE_TYPE (size),
3480 wi::sub (wisiz, min));
3481 else
3482 return size_zero_node;
3487 else if (code != ADDR_EXPR)
3488 return NULL_TREE;
3491 /* Unless computing the largest size (for memcpy and other raw memory
3492 functions), try to determine the size of the object from its type. */
3493 if (!ostype)
3494 return NULL_TREE;
3496 if (TREE_CODE (dest) != ADDR_EXPR)
3497 return NULL_TREE;
3499 tree type = TREE_TYPE (dest);
3500 if (TREE_CODE (type) == POINTER_TYPE)
3501 type = TREE_TYPE (type);
3503 type = TYPE_MAIN_VARIANT (type);
3505 if (TREE_CODE (type) == ARRAY_TYPE
3506 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3508 /* Return the constant size unless it's zero (that's a zero-length
3509 array likely at the end of a struct). */
3510 tree size = TYPE_SIZE_UNIT (type);
3511 if (size && TREE_CODE (size) == INTEGER_CST
3512 && !integer_zerop (size))
3513 return size;
3516 return NULL_TREE;
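/* Illustrative sketch (editorial addition, not from the GCC sources):
   given

     char buf[8];
     char *p = &buf[2];

   compute_objsize (p, 1) yields 6, the bytes remaining past the
   constant offset.  If instead the offset is an SSA_NAME whose
   value-range info says it lies in [2, 4], the lower bound 2 is used
   and 6 is again returned as the most optimistic estimate.  */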
3519 /* Helper to determine and check the sizes of the source and the destination
3520 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3521 call expression, DEST is the destination argument, SRC is the source
3522 argument or null, and LEN is the number of bytes. Use Object Size type-0
3523 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3524 (no overflow or invalid sizes), false otherwise. */
3526 static bool
3527 check_memop_access (tree exp, tree dest, tree src, tree size)
3529 /* For functions like memset and memcpy that operate on raw memory
3530 try to determine the size of the largest source and destination
3531 object using type-0 Object Size regardless of the object size
3532 type specified by the option. */
3533 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3534 tree dstsize = compute_objsize (dest, 0);
3536 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3537 srcsize, dstsize);
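/* Illustrative sketch (editorial addition): for a call like

     char d[4];
     extern char *s;
     __builtin_memcpy (d, s, 8);

   compute_objsize (d, 0) yields dstsize == 4 and check_access then
   diagnoses the 8-byte write into the 4-byte object under
   -Wstringop-overflow.  */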
3540 /* Validate memchr arguments without performing any expansion.
3541 Return NULL_RTX. */
3543 static rtx
3544 expand_builtin_memchr (tree exp, rtx)
3546 if (!validate_arglist (exp,
3547 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3548 return NULL_RTX;
3550 tree arg1 = CALL_EXPR_ARG (exp, 0);
3551 tree len = CALL_EXPR_ARG (exp, 2);
3553 /* Diagnose calls where the specified length exceeds the size
3554 of the object. */
3555 if (warn_stringop_overflow)
3557 tree size = compute_objsize (arg1, 0);
3558 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3559 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3562 return NULL_RTX;
3565 /* Expand a call EXP to the memcpy builtin.
3566 Return NULL_RTX if we failed; the caller should emit a normal call,
3567 otherwise try to get the result in TARGET, if convenient (and in
3568 mode MODE if that's convenient). */
3570 static rtx
3571 expand_builtin_memcpy (tree exp, rtx target)
3573 if (!validate_arglist (exp,
3574 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3575 return NULL_RTX;
3577 tree dest = CALL_EXPR_ARG (exp, 0);
3578 tree src = CALL_EXPR_ARG (exp, 1);
3579 tree len = CALL_EXPR_ARG (exp, 2);
3581 check_memop_access (exp, dest, src, len);
3583 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3584 /*endp=*/ 0);
3587 /* Check a call EXP to the memmove built-in for validity.
3588 Return NULL_RTX on both success and failure. */
3590 static rtx
3591 expand_builtin_memmove (tree exp, rtx)
3593 if (!validate_arglist (exp,
3594 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3595 return NULL_RTX;
3597 tree dest = CALL_EXPR_ARG (exp, 0);
3598 tree src = CALL_EXPR_ARG (exp, 1);
3599 tree len = CALL_EXPR_ARG (exp, 2);
3601 check_memop_access (exp, dest, src, len);
3603 return NULL_RTX;
3606 /* Expand a call EXP to the mempcpy builtin.
3607 Return NULL_RTX if we failed; the caller should emit a normal call,
3608 otherwise try to get the result in TARGET, if convenient (and in
3609 mode MODE if that's convenient). If ENDP is 0 return the
3610 destination pointer, if ENDP is 1 return the end pointer ala
3611 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3612 stpcpy. */
3614 static rtx
3615 expand_builtin_mempcpy (tree exp, rtx target)
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3621 tree dest = CALL_EXPR_ARG (exp, 0);
3622 tree src = CALL_EXPR_ARG (exp, 1);
3623 tree len = CALL_EXPR_ARG (exp, 2);
3625 /* Policy does not generally allow using compute_objsize (which
3626 is used internally by check_memop_access) to change code generation
3627 or drive optimization decisions.
3629 In this instance it is safe because the code we generate has
3630 the same semantics regardless of the return value of
3631 check_memop_access. Exactly the same amount of data is copied
3632 and the return value is exactly the same in both cases.
3634 Furthermore, check_memop_access always uses mode 0 for the call to
3635 compute_objsize, so the imprecise nature of compute_objsize is
3636 avoided. */
3638 /* Avoid expanding mempcpy into memcpy when the call is determined
3639 to overflow the buffer. This also prevents the same overflow
3640 from being diagnosed again when expanding memcpy. */
3641 if (!check_memop_access (exp, dest, src, len))
3642 return NULL_RTX;
3644 return expand_builtin_mempcpy_args (dest, src, len,
3645 target, exp, /*endp=*/ 1);
3648 /* Helper function to do the actual work for expand of memory copy family
3649 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3650 of memory from SRC to DEST and assign to TARGET if convenient.
3651 If ENDP is 0 return the
3652 destination pointer, if ENDP is 1 return the end pointer ala
3653 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3654 stpcpy. */
3656 static rtx
3657 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3658 rtx target, tree exp, int endp)
3660 const char *src_str;
3661 unsigned int src_align = get_pointer_alignment (src);
3662 unsigned int dest_align = get_pointer_alignment (dest);
3663 rtx dest_mem, src_mem, dest_addr, len_rtx;
3664 HOST_WIDE_INT expected_size = -1;
3665 unsigned int expected_align = 0;
3666 unsigned HOST_WIDE_INT min_size;
3667 unsigned HOST_WIDE_INT max_size;
3668 unsigned HOST_WIDE_INT probable_max_size;
3670 /* If DEST is not a pointer type, call the normal function. */
3671 if (dest_align == 0)
3672 return NULL_RTX;
3674 /* If SRC is not a pointer type, don't do this
3675 operation in-line. */
3676 if (src_align == 0)
3677 return NULL_RTX;
3679 if (currently_expanding_gimple_stmt)
3680 stringop_block_profile (currently_expanding_gimple_stmt,
3681 &expected_align, &expected_size);
3683 if (expected_align < dest_align)
3684 expected_align = dest_align;
3685 dest_mem = get_memory_rtx (dest, len);
3686 set_mem_align (dest_mem, dest_align);
3687 len_rtx = expand_normal (len);
3688 determine_block_size (len, len_rtx, &min_size, &max_size,
3689 &probable_max_size);
3690 src_str = c_getstr (src);
3692 /* If SRC is a string constant and block move would be done
3693 by pieces, we can avoid loading the string from memory
3694 and only store the computed constants. */
3695 if (src_str
3696 && CONST_INT_P (len_rtx)
3697 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3698 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3699 CONST_CAST (char *, src_str),
3700 dest_align, false))
3702 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3703 builtin_memcpy_read_str,
3704 CONST_CAST (char *, src_str),
3705 dest_align, false, endp);
3706 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3707 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3708 return dest_mem;
3711 src_mem = get_memory_rtx (src, len);
3712 set_mem_align (src_mem, src_align);
3714 /* Copy word part most expediently. */
3715 enum block_op_methods method = BLOCK_OP_NORMAL;
3716 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3717 method = BLOCK_OP_TAILCALL;
3718 if (endp == 1 && target != const0_rtx)
3719 method = BLOCK_OP_NO_LIBCALL_RET;
3720 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3721 expected_align, expected_size,
3722 min_size, max_size, probable_max_size);
3723 if (dest_addr == pc_rtx)
3724 return NULL_RTX;
3726 if (dest_addr == 0)
3728 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3729 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3732 if (endp && target != const0_rtx)
3734 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3735 /* stpcpy returns a pointer to the last byte. */
3736 if (endp == 2)
3737 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3740 return dest_addr;
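/* Illustrative sketch (editorial addition) of the ENDP convention for
   a copy of N bytes from SRC to DEST:

     endp == 0: return DEST           (memcpy)
     endp == 1: return DEST + N       (mempcpy)
     endp == 2: return DEST + N - 1   (stpcpy, address of the NUL)

   e.g. for __builtin_mempcpy (d, s, 5) the expansion above emits the
   block move and then forms the PLUS of the destination address and
   the length rtx.  */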
3743 static rtx
3744 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3745 rtx target, tree orig_exp, int endp)
3747 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3748 endp);
3751 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3752 we failed; the caller should emit a normal call, otherwise try to
3753 get the result in TARGET, if convenient. If ENDP is 0 return the
3754 destination pointer, if ENDP is 1 return the end pointer ala
3755 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3756 stpcpy. */
3758 static rtx
3759 expand_movstr (tree dest, tree src, rtx target, int endp)
3761 struct expand_operand ops[3];
3762 rtx dest_mem;
3763 rtx src_mem;
3765 if (!targetm.have_movstr ())
3766 return NULL_RTX;
3768 dest_mem = get_memory_rtx (dest, NULL);
3769 src_mem = get_memory_rtx (src, NULL);
3770 if (!endp)
3772 target = force_reg (Pmode, XEXP (dest_mem, 0));
3773 dest_mem = replace_equiv_address (dest_mem, target);
3776 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3777 create_fixed_operand (&ops[1], dest_mem);
3778 create_fixed_operand (&ops[2], src_mem);
3779 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3780 return NULL_RTX;
3782 if (endp && target != const0_rtx)
3784 target = ops[0].value;
3785 /* movstr is supposed to set end to the address of the NUL
3786 terminator. If the caller requested a mempcpy-like return value,
3787 adjust it. */
3788 if (endp == 1)
3790 rtx tem = plus_constant (GET_MODE (target),
3791 gen_lowpart (GET_MODE (target), target), 1);
3792 emit_move_insn (target, force_operand (tem, NULL_RTX));
3795 return target;
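/* Illustrative sketch (editorial addition): movstr leaves the output
   operand pointing at the NUL terminator, so for endp == 1 (mempcpy
   semantics, one past the last byte copied) the code above adds 1:

     char d[4];
     movstr of "ab" into d -> result &d[2]  (the NUL)
     mempcpy-style value       -> &d[3]  */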
3798 /* Do some very basic size validation of a call to the strcat builtin
3799 given by EXP. Return NULL_RTX to have the built-in expand to a call
3800 to the library function. */
3802 static rtx
3803 expand_builtin_strcat (tree exp, rtx)
3805 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3806 || !warn_stringop_overflow)
3807 return NULL_RTX;
3809 tree dest = CALL_EXPR_ARG (exp, 0);
3810 tree src = CALL_EXPR_ARG (exp, 1);
3812 /* There is no way here to determine the length of the string in
3813 the destination to which the SRC string is being appended so
3814 just diagnose cases when the source string is longer than
3815 the destination object. */
3817 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3819 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3820 destsize);
3822 return NULL_RTX;
3825 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3826 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3827 try to get the result in TARGET, if convenient (and in mode MODE if that's
3828 convenient). */
3830 static rtx
3831 expand_builtin_strcpy (tree exp, rtx target)
3833 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3834 return NULL_RTX;
3836 tree dest = CALL_EXPR_ARG (exp, 0);
3837 tree src = CALL_EXPR_ARG (exp, 1);
3839 if (warn_stringop_overflow)
3841 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3842 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3843 src, destsize);
3846 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3848 /* Check to see if the argument was declared attribute nonstring
3849 and if so, issue a warning since at this point it's not known
3850 to be nul-terminated. */
3851 tree fndecl = get_callee_fndecl (exp);
3852 maybe_warn_nonstring_arg (fndecl, exp);
3853 return ret;
3856 return NULL_RTX;
3859 /* Helper function to do the actual work for expand_builtin_strcpy. The
3860 arguments to the builtin_strcpy call DEST and SRC are broken out
3861 so that this can also be called without constructing an actual CALL_EXPR.
3862 The other arguments and return value are the same as for
3863 expand_builtin_strcpy. */
3865 static rtx
3866 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3868 return expand_movstr (dest, src, target, /*endp=*/0);
3871 /* Expand a call EXP to the stpcpy builtin.
3872 Return NULL_RTX if we failed; the caller should emit a normal call,
3873 otherwise try to get the result in TARGET, if convenient (and in
3874 mode MODE if that's convenient). */
3876 static rtx
3877 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3879 tree dst, src;
3880 location_t loc = EXPR_LOCATION (exp);
3882 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3883 return NULL_RTX;
3885 dst = CALL_EXPR_ARG (exp, 0);
3886 src = CALL_EXPR_ARG (exp, 1);
3888 if (warn_stringop_overflow)
3890 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3891 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3892 src, destsize);
3895 /* If return value is ignored, transform stpcpy into strcpy. */
3896 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3898 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3899 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3900 return expand_expr (result, target, mode, EXPAND_NORMAL);
3902 else
3904 tree len, lenp1;
3905 rtx ret;
3907 /* Ensure we get an actual string whose length can be evaluated at
3908 compile-time, not an expression containing a string. This is
3909 because the latter will potentially produce pessimized code
3910 when used to produce the return value. */
3911 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3912 return expand_movstr (dst, src, target, /*endp=*/2);
3914 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3915 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3916 target, exp, /*endp=*/2);
3918 if (ret)
3919 return ret;
3921 if (TREE_CODE (len) == INTEGER_CST)
3923 rtx len_rtx = expand_normal (len);
3925 if (CONST_INT_P (len_rtx))
3927 ret = expand_builtin_strcpy_args (dst, src, target);
3929 if (ret)
3931 if (! target)
3933 if (mode != VOIDmode)
3934 target = gen_reg_rtx (mode);
3935 else
3936 target = gen_reg_rtx (GET_MODE (ret));
3938 if (GET_MODE (target) != GET_MODE (ret))
3939 ret = gen_lowpart (GET_MODE (target), ret);
3941 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3942 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3943 gcc_assert (ret);
3945 return target;
3950 return expand_movstr (dst, src, target, /*endp=*/2);
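/* Illustrative sketch (editorial addition): for a known constant
   source the expansion above rewrites

     p = __builtin_stpcpy (d, "abc");

   as a mempcpy of strlen ("abc") + 1 == 4 bytes with endp == 2, so P
   ends up as D + 3, the address of the copied NUL terminator.  When
   the result is unused, the call degrades to plain strcpy.  */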
3954 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3955 arguments while being careful to avoid duplicate warnings (which could
3956 be issued if the expander were to expand the call, resulting in it
3957 being emitted in expand_call()). */
3959 static rtx
3960 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3962 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3964 /* The call has been successfully expanded. Check for nonstring
3965 arguments and issue warnings as appropriate. */
3966 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3967 return ret;
3970 return NULL_RTX;
3973 /* Check a call EXP to the stpncpy built-in for validity.
3974 Return NULL_RTX on both success and failure. */
3976 static rtx
3977 expand_builtin_stpncpy (tree exp, rtx)
3979 if (!validate_arglist (exp,
3980 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3981 || !warn_stringop_overflow)
3982 return NULL_RTX;
3984 /* The source and destination of the call. */
3985 tree dest = CALL_EXPR_ARG (exp, 0);
3986 tree src = CALL_EXPR_ARG (exp, 1);
3988 /* The exact number of bytes to write (not the maximum). */
3989 tree len = CALL_EXPR_ARG (exp, 2);
3991 /* The size of the destination object. */
3992 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3994 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3996 return NULL_RTX;
3999 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4000 bytes from constant string DATA + OFFSET and return it as target
4001 constant. */
4004 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4005 scalar_int_mode mode)
4007 const char *str = (const char *) data;
4009 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4010 return const0_rtx;
4012 return c_readstr (str + offset, mode);
4015 /* Helper to check the sizes of sequences and the destination of calls
4016 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4017 success (no overflow or invalid sizes), false otherwise. */
4019 static bool
4020 check_strncat_sizes (tree exp, tree objsize)
4022 tree dest = CALL_EXPR_ARG (exp, 0);
4023 tree src = CALL_EXPR_ARG (exp, 1);
4024 tree maxread = CALL_EXPR_ARG (exp, 2);
4026 /* Try to determine the range of lengths that the source expression
4027 refers to. */
4028 tree lenrange[2];
4029 get_range_strlen (src, lenrange);
4031 /* Try to verify that the destination is big enough for the shortest
4032 string. */
4034 if (!objsize && warn_stringop_overflow)
4036 /* If it hasn't been provided by __strncat_chk, try to determine
4037 the size of the destination object into which the source is
4038 being copied. */
4039 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4042 /* Add one for the terminating nul. */
4043 tree srclen = (lenrange[0]
4044 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4045 size_one_node)
4046 : NULL_TREE);
4048 /* The strncat function copies at most MAXREAD bytes and always appends
4049 the terminating nul so the specified upper bound should never be equal
4050 to (or greater than) the size of the destination. */
4051 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4052 && tree_int_cst_equal (objsize, maxread))
4054 location_t loc = tree_nonartificial_location (exp);
4055 loc = expansion_point_location_if_in_system_header (loc);
4057 warning_at (loc, OPT_Wstringop_overflow_,
4058 "%K%qD specified bound %E equals destination size",
4059 exp, get_callee_fndecl (exp), maxread);
4061 return false;
4064 if (!srclen
4065 || (maxread && tree_fits_uhwi_p (maxread)
4066 && tree_fits_uhwi_p (srclen)
4067 && tree_int_cst_lt (maxread, srclen)))
4068 srclen = maxread;
4070 /* The number of bytes to write is MAXREAD but check_access will also
4071 check SRCLEN if MAXREAD's value isn't known. */
4072 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4073 objsize);
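/* Illustrative sketch (editorial addition): the bound-equals-size
   diagnostic above fires for the common misuse

     char d[8];
     strncat (d, s, sizeof d);

   because strncat appends up to the bound *plus* the terminating NUL;
   a correct bound would be sizeof d - strlen (d) - 1.  */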
4076 /* Similar to expand_builtin_strcat, do some very basic size validation
4077 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4078 the built-in expand to a call to the library function. */
4080 static rtx
4081 expand_builtin_strncat (tree exp, rtx)
4083 if (!validate_arglist (exp,
4084 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4085 || !warn_stringop_overflow)
4086 return NULL_RTX;
4088 tree dest = CALL_EXPR_ARG (exp, 0);
4089 tree src = CALL_EXPR_ARG (exp, 1);
4090 /* The upper bound on the number of bytes to write. */
4091 tree maxread = CALL_EXPR_ARG (exp, 2);
4092 /* The length of the source sequence. */
4093 tree slen = c_strlen (src, 1);
4095 /* Try to determine the range of lengths that the source expression
4096 refers to. */
4097 tree lenrange[2];
4098 if (slen)
4099 lenrange[0] = lenrange[1] = slen;
4100 else
4101 get_range_strlen (src, lenrange);
4103 /* Try to verify that the destination is big enough for the shortest
4104 string. First try to determine the size of the destination object
4105 into which the source is being copied. */
4106 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4108 /* Add one for the terminating nul. */
4109 tree srclen = (lenrange[0]
4110 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4111 size_one_node)
4112 : NULL_TREE);
4114 /* The strncat function copies at most MAXREAD bytes and always appends
4115 the terminating nul so the specified upper bound should never be equal
4116 to (or greater than) the size of the destination. */
4117 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4118 && tree_int_cst_equal (destsize, maxread))
4120 location_t loc = tree_nonartificial_location (exp);
4121 loc = expansion_point_location_if_in_system_header (loc);
4123 warning_at (loc, OPT_Wstringop_overflow_,
4124 "%K%qD specified bound %E equals destination size",
4125 exp, get_callee_fndecl (exp), maxread);
4127 return NULL_RTX;
4130 if (!srclen
4131 || (maxread && tree_fits_uhwi_p (maxread)
4132 && tree_fits_uhwi_p (srclen)
4133 && tree_int_cst_lt (maxread, srclen)))
4134 srclen = maxread;
4136 /* The number of bytes to write is SRCLEN. */
4137 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4139 return NULL_RTX;
4142 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4143 NULL_RTX if we failed; the caller should emit a normal call. */
4145 static rtx
4146 expand_builtin_strncpy (tree exp, rtx target)
4148 location_t loc = EXPR_LOCATION (exp);
4150 if (validate_arglist (exp,
4151 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4153 tree dest = CALL_EXPR_ARG (exp, 0);
4154 tree src = CALL_EXPR_ARG (exp, 1);
4155 /* The number of bytes to write (not the maximum). */
4156 tree len = CALL_EXPR_ARG (exp, 2);
4157 /* The length of the source sequence. */
4158 tree slen = c_strlen (src, 1);
4160 if (warn_stringop_overflow)
4162 tree destsize = compute_objsize (dest,
4163 warn_stringop_overflow - 1);
4165 /* The number of bytes to write is LEN but check_access will also
4166 check SLEN if LEN's value isn't known. */
4167 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4168 destsize);
4171 /* We must be passed constant LEN and SRC parameters. */
4172 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4173 return NULL_RTX;
4175 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4177 /* We're required to pad with trailing zeros if the requested
4178 len is greater than strlen(s2)+1. In that case try to
4179 use store_by_pieces; if it fails, punt. */
4180 if (tree_int_cst_lt (slen, len))
4182 unsigned int dest_align = get_pointer_alignment (dest);
4183 const char *p = c_getstr (src);
4184 rtx dest_mem;
4186 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4187 || !can_store_by_pieces (tree_to_uhwi (len),
4188 builtin_strncpy_read_str,
4189 CONST_CAST (char *, p),
4190 dest_align, false))
4191 return NULL_RTX;
4193 dest_mem = get_memory_rtx (dest, len);
4194 store_by_pieces (dest_mem, tree_to_uhwi (len),
4195 builtin_strncpy_read_str,
4196 CONST_CAST (char *, p), dest_align, false, 0);
4197 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4198 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4199 return dest_mem;
4202 return NULL_RTX;
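/* Illustrative sketch (editorial addition): for

     char d[8];
     __builtin_strncpy (d, "ab", 5);

   slen + 1 == 3 is less than len == 5, so the required zero padding
   makes a plain block copy insufficient; store_by_pieces emits the
   two characters plus three NULs directly, or we punt to the library
   call.  */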
4205 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4206 bytes from constant string DATA + OFFSET and return it as target
4207 constant. */
4210 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4211 scalar_int_mode mode)
4213 const char *c = (const char *) data;
4214 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4216 memset (p, *c, GET_MODE_SIZE (mode));
4218 return c_readstr (p, mode);
4221 /* Callback routine for store_by_pieces. Return the RTL of a register
4222 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4223 char value given in the RTL register data. For example, if mode is
4224 4 bytes wide, return the RTL for 0x01010101*data. */
4226 static rtx
4227 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4228 scalar_int_mode mode)
4230 rtx target, coeff;
4231 size_t size;
4232 char *p;
4234 size = GET_MODE_SIZE (mode);
4235 if (size == 1)
4236 return (rtx) data;
4238 p = XALLOCAVEC (char, size);
4239 memset (p, 1, size);
4240 coeff = c_readstr (p, mode);
4242 target = convert_to_mode (mode, (rtx) data, 1);
4243 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4244 return force_reg (mode, target);
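/* Illustrative sketch (editorial addition): for a 4-byte MODE and a
   register holding the runtime byte value V, the code above computes

     (V & 0xff) * 0x01010101

   so V == 0x2A yields 0x2A2A2A2A, one copy of the byte per lane.  */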
4247 /* Expand expression EXP, which is a call to the memset builtin. Return
4248 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4249 try to get the result in TARGET, if convenient (and in mode MODE if that's
4250 convenient). */
4252 static rtx
4253 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4255 if (!validate_arglist (exp,
4256 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX;
4259 tree dest = CALL_EXPR_ARG (exp, 0);
4260 tree val = CALL_EXPR_ARG (exp, 1);
4261 tree len = CALL_EXPR_ARG (exp, 2);
4263 check_memop_access (exp, dest, NULL_TREE, len);
4265 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4268 /* Helper function to do the actual work for expand_builtin_memset. The
4269 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4270 so that this can also be called without constructing an actual CALL_EXPR.
4271 The other arguments and return value are the same as for
4272 expand_builtin_memset. */
4274 static rtx
4275 expand_builtin_memset_args (tree dest, tree val, tree len,
4276 rtx target, machine_mode mode, tree orig_exp)
4278 tree fndecl, fn;
4279 enum built_in_function fcode;
4280 machine_mode val_mode;
4281 char c;
4282 unsigned int dest_align;
4283 rtx dest_mem, dest_addr, len_rtx;
4284 HOST_WIDE_INT expected_size = -1;
4285 unsigned int expected_align = 0;
4286 unsigned HOST_WIDE_INT min_size;
4287 unsigned HOST_WIDE_INT max_size;
4288 unsigned HOST_WIDE_INT probable_max_size;
4290 dest_align = get_pointer_alignment (dest);
4292 /* If DEST is not a pointer type, don't do this operation in-line. */
4293 if (dest_align == 0)
4294 return NULL_RTX;
4296 if (currently_expanding_gimple_stmt)
4297 stringop_block_profile (currently_expanding_gimple_stmt,
4298 &expected_align, &expected_size);
4300 if (expected_align < dest_align)
4301 expected_align = dest_align;
4303 /* If the LEN parameter is zero, return DEST. */
4304 if (integer_zerop (len))
4306 /* Evaluate and ignore VAL in case it has side-effects. */
4307 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4308 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4311 /* Stabilize the arguments in case we fail. */
4312 dest = builtin_save_expr (dest);
4313 val = builtin_save_expr (val);
4314 len = builtin_save_expr (len);
4316 len_rtx = expand_normal (len);
4317 determine_block_size (len, len_rtx, &min_size, &max_size,
4318 &probable_max_size);
4319 dest_mem = get_memory_rtx (dest, len);
4320 val_mode = TYPE_MODE (unsigned_char_type_node);
4322 if (TREE_CODE (val) != INTEGER_CST)
4324 rtx val_rtx;
4326 val_rtx = expand_normal (val);
4327 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4329 /* Assume that we can memset by pieces if we can store
4330 the coefficients by pieces (in the required modes).
4331 We can't pass builtin_memset_gen_str as that emits RTL. */
4332 c = 1;
4333 if (tree_fits_uhwi_p (len)
4334 && can_store_by_pieces (tree_to_uhwi (len),
4335 builtin_memset_read_str, &c, dest_align,
4336 true))
4338 val_rtx = force_reg (val_mode, val_rtx);
4339 store_by_pieces (dest_mem, tree_to_uhwi (len),
4340 builtin_memset_gen_str, val_rtx, dest_align,
4341 true, 0);
4343 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4344 dest_align, expected_align,
4345 expected_size, min_size, max_size,
4346 probable_max_size))
4347 goto do_libcall;
4349 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4350 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4351 return dest_mem;
4354 if (target_char_cast (val, &c))
4355 goto do_libcall;
4357 if (c)
4359 if (tree_fits_uhwi_p (len)
4360 && can_store_by_pieces (tree_to_uhwi (len),
4361 builtin_memset_read_str, &c, dest_align,
4362 true))
4363 store_by_pieces (dest_mem, tree_to_uhwi (len),
4364 builtin_memset_read_str, &c, dest_align, true, 0);
4365 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4366 gen_int_mode (c, val_mode),
4367 dest_align, expected_align,
4368 expected_size, min_size, max_size,
4369 probable_max_size))
4370 goto do_libcall;
4372 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4373 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4374 return dest_mem;
4377 set_mem_align (dest_mem, dest_align);
4378 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4379 CALL_EXPR_TAILCALL (orig_exp)
4380 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4381 expected_align, expected_size,
4382 min_size, max_size,
4383 probable_max_size);
4385 if (dest_addr == 0)
4387 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4388 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4391 return dest_addr;
4393 do_libcall:
4394 fndecl = get_callee_fndecl (orig_exp);
4395 fcode = DECL_FUNCTION_CODE (fndecl);
4396 if (fcode == BUILT_IN_MEMSET)
4397 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4398 dest, val, len);
4399 else if (fcode == BUILT_IN_BZERO)
4400 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4401 dest, len);
4402 else
4403 gcc_unreachable ();
4404 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4405 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4406 return expand_call (fn, target, target == const0_rtx);
4409 /* Expand expression EXP, which is a call to the bzero builtin. Return
4410 NULL_RTX if we failed; the caller should emit a normal call. */
4412 static rtx
4413 expand_builtin_bzero (tree exp)
4415 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4416 return NULL_RTX;
4418 tree dest = CALL_EXPR_ARG (exp, 0);
4419 tree size = CALL_EXPR_ARG (exp, 1);
4421 check_memop_access (exp, dest, NULL_TREE, size);
4423 /* New argument list transforming bzero(ptr x, int y) to
4424 memset(ptr x, int 0, size_t y). This is done this way
4425 so that if it isn't expanded inline, we fall back to
4426 calling bzero instead of memset. */
4428 location_t loc = EXPR_LOCATION (exp);
4430 return expand_builtin_memset_args (dest, integer_zero_node,
4431 fold_convert_loc (loc,
4432 size_type_node, size),
4433 const0_rtx, VOIDmode, exp);
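/* Illustrative sketch (editorial addition): the transformation above
   turns

     __builtin_bzero (p, n);

   into the equivalent of memset (p, 0, (size_t) n), while passing the
   original EXP through so a failed inline expansion still calls bzero
   rather than memset.  */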
4436 /* Try to expand cmpstr operation ICODE with the given operands.
4437 Return the result rtx on success, otherwise return null. */
4439 static rtx
4440 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4441 HOST_WIDE_INT align)
4443 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4445 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4446 target = NULL_RTX;
4448 struct expand_operand ops[4];
4449 create_output_operand (&ops[0], target, insn_mode);
4450 create_fixed_operand (&ops[1], arg1_rtx);
4451 create_fixed_operand (&ops[2], arg2_rtx);
4452 create_integer_operand (&ops[3], align);
4453 if (maybe_expand_insn (icode, 4, ops))
4454 return ops[0].value;
4455 return NULL_RTX;
4458 /* Expand expression EXP, which is a call to the memcmp built-in function.
4459 Return NULL_RTX if we failed and the caller should emit a normal call,
4460 otherwise try to get the result in TARGET, if convenient.
4461 RESULT_EQ is true if we can relax the returned value to be either zero
4462 or nonzero, without caring about the sign. */
4464 static rtx
4465 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4467 if (!validate_arglist (exp,
4468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4469 return NULL_RTX;
4471 tree arg1 = CALL_EXPR_ARG (exp, 0);
4472 tree arg2 = CALL_EXPR_ARG (exp, 1);
4473 tree len = CALL_EXPR_ARG (exp, 2);
4474 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4475 bool no_overflow = true;
4477 /* Diagnose calls where the specified length exceeds the size of either
4478 object. */
4479 tree size = compute_objsize (arg1, 0);
4480 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4481 len, /*maxread=*/NULL_TREE, size,
4482 /*objsize=*/NULL_TREE);
4483 if (no_overflow)
4485 size = compute_objsize (arg2, 0);
4486 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4487 len, /*maxread=*/NULL_TREE, size,
4488 /*objsize=*/NULL_TREE);
4491 /* If the specified length exceeds the size of either object,
4492 call the function. */
4493 if (!no_overflow)
4494 return NULL_RTX;
4496 /* Due to the performance benefit, always inline the calls first
4497 when result_eq is false. */
4498 rtx result = NULL_RTX;
4500 if (!result_eq && fcode != BUILT_IN_BCMP)
4502 result = inline_expand_builtin_string_cmp (exp, target);
4503 if (result)
4504 return result;
4507 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4508 location_t loc = EXPR_LOCATION (exp);
4510 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4511 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4513 /* If we don't have POINTER_TYPE, call the function. */
4514 if (arg1_align == 0 || arg2_align == 0)
4515 return NULL_RTX;
4517 rtx arg1_rtx = get_memory_rtx (arg1, len);
4518 rtx arg2_rtx = get_memory_rtx (arg2, len);
4519 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4521 /* Set MEM_SIZE as appropriate. */
4522 if (CONST_INT_P (len_rtx))
4524 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4525 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4528 by_pieces_constfn constfn = NULL;
4530 const char *src_str = c_getstr (arg2);
4531 if (result_eq && src_str == NULL)
4533 src_str = c_getstr (arg1);
4534 if (src_str != NULL)
4535 std::swap (arg1_rtx, arg2_rtx);
4538 /* If SRC is a string constant and block move would be done
4539 by pieces, we can avoid loading the string from memory
4540 and only store the computed constants. */
4541 if (src_str
4542 && CONST_INT_P (len_rtx)
4543 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4544 constfn = builtin_memcpy_read_str;
4546 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4547 TREE_TYPE (len), target,
4548 result_eq, constfn,
4549 CONST_CAST (char *, src_str));
4551 if (result)
4553 /* Return the value in the proper mode for this function. */
4554 if (GET_MODE (result) == mode)
4555 return result;
4557 if (target != 0)
4559 convert_move (target, result, 0);
4560 return target;
4563 return convert_to_mode (mode, result, 0);
4566 return NULL_RTX;
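/* Illustrative sketch (editorial addition): RESULT_EQ holds for uses
   such as

     if (__builtin_memcmp (a, b, n) == 0) ...

   where only zero/nonzero matters, so the block comparison may return
   any nonzero value on inequality instead of the signed byte
   difference the library function computes.  */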
4569 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4570 if we failed; the caller should emit a normal call, otherwise try to get
4571 the result in TARGET, if convenient. */
4573 static rtx
4574 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4577 return NULL_RTX;
4579 /* Due to the performance benefit, always inline the calls first. */
4580 rtx result = NULL_RTX;
4581 result = inline_expand_builtin_string_cmp (exp, target);
4582 if (result)
4583 return result;
4585 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4586 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4587 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4588 return NULL_RTX;
4590 tree arg1 = CALL_EXPR_ARG (exp, 0);
4591 tree arg2 = CALL_EXPR_ARG (exp, 1);
4593 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4594 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4596 /* If we don't have POINTER_TYPE, call the function. */
4597 if (arg1_align == 0 || arg2_align == 0)
4598 return NULL_RTX;
4600 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4601 arg1 = builtin_save_expr (arg1);
4602 arg2 = builtin_save_expr (arg2);
4604 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4605 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4607 /* Try to call cmpstrsi. */
4608 if (cmpstr_icode != CODE_FOR_nothing)
4609 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4610 MIN (arg1_align, arg2_align));
4612 /* Try to determine at least one length and call cmpstrnsi. */
4613 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4615 tree len;
4616 rtx arg3_rtx;
4618 tree len1 = c_strlen (arg1, 1);
4619 tree len2 = c_strlen (arg2, 1);
4621 if (len1)
4622 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4623 if (len2)
4624 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4626 /* If we don't have a constant length for the first, use the length
4627 of the second, if we know it. We don't require a constant for
4628 this case; some cost analysis could be done if both are available
4629 but neither is constant. For now, assume they're equally cheap,
4630 unless one has side effects. If both strings have constant lengths,
4631 use the smaller. */
4633 if (!len1)
4634 len = len2;
4635 else if (!len2)
4636 len = len1;
4637 else if (TREE_SIDE_EFFECTS (len1))
4638 len = len2;
4639 else if (TREE_SIDE_EFFECTS (len2))
4640 len = len1;
4641 else if (TREE_CODE (len1) != INTEGER_CST)
4642 len = len2;
4643 else if (TREE_CODE (len2) != INTEGER_CST)
4644 len = len1;
4645 else if (tree_int_cst_lt (len1, len2))
4646 len = len1;
4647 else
4648 len = len2;
4650 /* If both arguments have side effects, we cannot optimize. */
4651 if (len && !TREE_SIDE_EFFECTS (len))
4653 arg3_rtx = expand_normal (len);
4654 result = expand_cmpstrn_or_cmpmem
4655 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4656 arg3_rtx, MIN (arg1_align, arg2_align));
4660 tree fndecl = get_callee_fndecl (exp);
4661 if (result)
4663 /* Check to see if the argument was declared attribute nonstring
4664 and if so, issue a warning since at this point it's not known
4665 to be nul-terminated. */
4666 maybe_warn_nonstring_arg (fndecl, exp);
4668 /* Return the value in the proper mode for this function. */
4669 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4670 if (GET_MODE (result) == mode)
4671 return result;
4672 if (target == 0)
4673 return convert_to_mode (mode, result, 0);
4674 convert_move (target, result, 0);
4675 return target;
4678 /* Expand the library call ourselves using a stabilized argument
4679 list to avoid re-evaluating the function's arguments twice. */
4680 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4681 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4682 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4683 return expand_call (fn, target, target == const0_rtx);
4686 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4687 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4688 the result in TARGET, if convenient. */
4690 static rtx
4691 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4692 ATTRIBUTE_UNUSED machine_mode mode)
4694 if (!validate_arglist (exp,
4695 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4696 return NULL_RTX;
4698 /* Due to the performance benefit, always inline the calls first. */
4699 rtx result = NULL_RTX;
4700 result = inline_expand_builtin_string_cmp (exp, target);
4701 if (result)
4702 return result;
4704 /* If c_strlen can determine an expression for one of the string
4705 lengths, and it doesn't have side effects, then emit cmpstrnsi
4706 using length MIN(strlen(string)+1, arg3). */
4707 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4708 if (cmpstrn_icode == CODE_FOR_nothing)
4709 return NULL_RTX;
4711 tree len;
4713 tree arg1 = CALL_EXPR_ARG (exp, 0);
4714 tree arg2 = CALL_EXPR_ARG (exp, 1);
4715 tree arg3 = CALL_EXPR_ARG (exp, 2);
4717 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4718 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4720 tree len1 = c_strlen (arg1, 1);
4721 tree len2 = c_strlen (arg2, 1);
4723 location_t loc = EXPR_LOCATION (exp);
4725 if (len1)
4726 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4727 if (len2)
4728 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4730 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4732 /* If we don't have a constant length for the first, use the length
4733 of the second, if we know it. If neither string is constant length,
4734 use the given length argument. We don't require a constant for
4735 this case; some cost analysis could be done if both are available
4736 but neither is constant. For now, assume they're equally cheap,
4737 unless one has side effects. If both strings have constant lengths,
4738 use the smaller. */
4740 if (!len1 && !len2)
4741 len = len3;
4742 else if (!len1)
4743 len = len2;
4744 else if (!len2)
4745 len = len1;
4746 else if (TREE_SIDE_EFFECTS (len1))
4747 len = len2;
4748 else if (TREE_SIDE_EFFECTS (len2))
4749 len = len1;
4750 else if (TREE_CODE (len1) != INTEGER_CST)
4751 len = len2;
4752 else if (TREE_CODE (len2) != INTEGER_CST)
4753 len = len1;
4754 else if (tree_int_cst_lt (len1, len2))
4755 len = len1;
4756 else
4757 len = len2;
4759 /* If we are not using the given length, we must incorporate it here.
4760 The actual new length parameter will be MIN(len,arg3) in this case. */
4761 if (len != len3)
4763 len = fold_convert_loc (loc, sizetype, len);
4764 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4766 rtx arg1_rtx = get_memory_rtx (arg1, len);
4767 rtx arg2_rtx = get_memory_rtx (arg2, len);
4768 rtx arg3_rtx = expand_normal (len);
4769 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4770 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4771 MIN (arg1_align, arg2_align));
4773 tree fndecl = get_callee_fndecl (exp);
4774 if (result)
4776 /* Check to see if the argument was declared attribute nonstring
4777 and if so, issue a warning since at this point it's not known
4778 to be nul-terminated. */
4779 maybe_warn_nonstring_arg (fndecl, exp);
4781 /* Return the value in the proper mode for this function. */
4782 mode = TYPE_MODE (TREE_TYPE (exp));
4783 if (GET_MODE (result) == mode)
4784 return result;
4785 if (target == 0)
4786 return convert_to_mode (mode, result, 0);
4787 convert_move (target, result, 0);
4788 return target;
4791 /* Expand the library call ourselves using a stabilized argument
4792 list to avoid re-evaluating the function's arguments twice. */
4793 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4794 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4795 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4796 return expand_call (fn, target, target == const0_rtx);
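/* Illustrative sketch (editorial addition): for

     __builtin_strncmp (x, "ab", 10)

   c_strlen gives len2 == 2, so the comparison length becomes
   MIN (len2 + 1, 10) == 3, enough to cover the terminating NUL of the
   shorter constant string.  */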
4799 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4800 if that's convenient. */
4803 expand_builtin_saveregs (void)
4805 rtx val;
4806 rtx_insn *seq;
4808 /* Don't do __builtin_saveregs more than once in a function.
4809 Save the result of the first call and reuse it. */
4810 if (saveregs_value != 0)
4811 return saveregs_value;
4813 /* When this function is called, it means that registers must be
4814 saved on entry to this function. So we migrate the call to the
4815 first insn of this function. */
4817 start_sequence ();
4819 /* Do whatever the machine needs done in this case. */
4820 val = targetm.calls.expand_builtin_saveregs ();
4822 seq = get_insns ();
4823 end_sequence ();
4825 saveregs_value = val;
4827 /* Put the insns after the NOTE that starts the function. If this
4828 is inside a start_sequence, make the outer-level insn chain current, so
4829 the code is placed at the start of the function. */
4830 push_topmost_sequence ();
4831 emit_insn_after (seq, entry_of_function ());
4832 pop_topmost_sequence ();
4834 return val;
4837 /* Expand a call to __builtin_next_arg. */
4839 static rtx
4840 expand_builtin_next_arg (void)
4842 /* Checking arguments is already done in fold_builtin_next_arg
4843 which must be called before this function. */
4844 return expand_binop (ptr_mode, add_optab,
4845 crtl->args.internal_arg_pointer,
4846 crtl->args.arg_offset_rtx,
4847 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4850 /* Make it easier for the backends by protecting the valist argument
4851 from multiple evaluations. */
4853 static tree
4854 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4856 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4858 /* The current way of determining the type of valist is completely
4859 bogus. We should have the information on the va builtin instead. */
4860 if (!vatype)
4861 vatype = targetm.fn_abi_va_list (cfun->decl);
4863 if (TREE_CODE (vatype) == ARRAY_TYPE)
4865 if (TREE_SIDE_EFFECTS (valist))
4866 valist = save_expr (valist);
4868 /* For this case, the backends will be expecting a pointer to
4869 vatype, but it's possible we've actually been given an array
4870 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4871 So fix it. */
4872 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4874 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4875 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4878 else
4880 tree pt = build_pointer_type (vatype);
4882 if (! needs_lvalue)
4884 if (! TREE_SIDE_EFFECTS (valist))
4885 return valist;
4887 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4888 TREE_SIDE_EFFECTS (valist) = 1;
4891 if (TREE_SIDE_EFFECTS (valist))
4892 valist = save_expr (valist);
4893 valist = fold_build2_loc (loc, MEM_REF,
4894 vatype, valist, build_int_cst (pt, 0));
4897 return valist;
4900 /* The "standard" definition of va_list is void*. */
4902 tree
4903 std_build_builtin_va_list (void)
4905 return ptr_type_node;
4908 /* The "standard" abi va_list is va_list_type_node. */
4910 tree
4911 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4913 return va_list_type_node;
4916 /* The "standard" type of va_list is va_list_type_node. */
4918 tree
4919 std_canonical_va_list_type (tree type)
4921 tree wtype, htype;
4923 wtype = va_list_type_node;
4924 htype = type;
4926 if (TREE_CODE (wtype) == ARRAY_TYPE)
4928 /* If va_list is an array type, the argument may have decayed
4929 to a pointer type, e.g. by being passed to another function.
4930 In that case, unwrap both types so that we can compare the
4931 underlying records. */
4932 if (TREE_CODE (htype) == ARRAY_TYPE
4933 || POINTER_TYPE_P (htype))
4935 wtype = TREE_TYPE (wtype);
4936 htype = TREE_TYPE (htype);
4939 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4940 return va_list_type_node;
4942 return NULL_TREE;
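/* Illustrative sketch (editorial addition): on targets where va_list
   is an array type such as 'struct __va_list_tag [1]', a va_list
   parameter decays to 'struct __va_list_tag *'; unwrapping one layer
   from both WTYPE and HTYPE above lets the main variants match and
   the canonical va_list_type_node be returned.  */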
4945 /* The "standard" implementation of va_start: just assign `nextarg' to
4946 the variable. */
4948 void
4949 std_expand_builtin_va_start (tree valist, rtx nextarg)
4951 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4952 convert_move (va_r, nextarg, 0);
4955 /* Expand EXP, a call to __builtin_va_start. */
4957 static rtx
4958 expand_builtin_va_start (tree exp)
4960 rtx nextarg;
4961 tree valist;
4962 location_t loc = EXPR_LOCATION (exp);
4964 if (call_expr_nargs (exp) < 2)
4966 error_at (loc, "too few arguments to function %<va_start%>");
4967 return const0_rtx;
4970 if (fold_builtin_next_arg (exp, true))
4971 return const0_rtx;
4973 nextarg = expand_builtin_next_arg ();
4974 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4976 if (targetm.expand_builtin_va_start)
4977 targetm.expand_builtin_va_start (valist, nextarg);
4978 else
4979 std_expand_builtin_va_start (valist, nextarg);
4981 return const0_rtx;
4984 /* Expand EXP, a call to __builtin_va_end. */
4986 static rtx
4987 expand_builtin_va_end (tree exp)
4989 tree valist = CALL_EXPR_ARG (exp, 0);
4991 /* Evaluate for side effects, if needed. I hate macros that don't
4992 do that. */
4993 if (TREE_SIDE_EFFECTS (valist))
4994 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4996 return const0_rtx;
4999 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5000 builtin rather than just as an assignment in stdarg.h because of the
5001 nastiness of array-type va_list types. */
5003 static rtx
5004 expand_builtin_va_copy (tree exp)
5006 tree dst, src, t;
5007 location_t loc = EXPR_LOCATION (exp);
5009 dst = CALL_EXPR_ARG (exp, 0);
5010 src = CALL_EXPR_ARG (exp, 1);
5012 dst = stabilize_va_list_loc (loc, dst, 1);
5013 src = stabilize_va_list_loc (loc, src, 0);
5015 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5017 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5019 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5020 TREE_SIDE_EFFECTS (t) = 1;
5021 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5023 else
5025 rtx dstb, srcb, size;
5027 /* Evaluate to pointers. */
5028 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5029 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5030 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5031 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5033 dstb = convert_memory_address (Pmode, dstb);
5034 srcb = convert_memory_address (Pmode, srcb);
5036 /* "Dereference" to BLKmode memories. */
5037 dstb = gen_rtx_MEM (BLKmode, dstb);
5038 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5039 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5040 srcb = gen_rtx_MEM (BLKmode, srcb);
5041 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5042 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5044 /* Copy. */
5045 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5048 return const0_rtx;
5051 /* Expand a call to one of the builtin functions __builtin_frame_address or
5052 __builtin_return_address. */
5054 static rtx
5055 expand_builtin_frame_address (tree fndecl, tree exp)
5057 /* The argument must be a nonnegative integer constant.
5058 It counts the number of frames to scan up the stack.
5059 The value is either the frame pointer value or the return
5060 address saved in that frame. */
5061 if (call_expr_nargs (exp) == 0)
5062 /* Warning about missing arg was already issued. */
5063 return const0_rtx;
5064 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5066 error ("invalid argument to %qD", fndecl);
5067 return const0_rtx;
5069 else
5071 /* Number of frames to scan up the stack. */
5072 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5074 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5076 /* Some ports cannot access arbitrary stack frames. */
5077 if (tem == NULL)
5079 warning (0, "unsupported argument to %qD", fndecl);
5080 return const0_rtx;
5083 if (count)
5085 /* Warn since no effort is made to ensure that any frame
5086 beyond the current one exists or can be safely reached. */
5087 warning (OPT_Wframe_address, "calling %qD with "
5088 "a nonzero argument is unsafe", fndecl);
5091 /* For __builtin_frame_address, return what we've got. */
5092 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5093 return tem;
5095 if (!REG_P (tem)
5096 && ! CONSTANT_P (tem))
5097 tem = copy_addr_to_reg (tem);
5098 return tem;
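/* Illustrative sketch (editorial addition): __builtin_return_address (0)
   expands via expand_builtin_return_addr to the current frame's saved
   return address, whereas a nonzero count such as
   __builtin_frame_address (2) walks two frames up and triggers the
   -Wframe-address warning above, since frames beyond the current one
   may not exist or be reachable.  */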
5102 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5103 failed and the caller should emit a normal call. */
5105 static rtx
5106 expand_builtin_alloca (tree exp)
5108 rtx op0;
5109 rtx result;
5110 unsigned int align;
5111 tree fndecl = get_callee_fndecl (exp);
5112 HOST_WIDE_INT max_size;
5113 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5114 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5115 bool valid_arglist
5116 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5117 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5118 VOID_TYPE)
5119 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5120 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5121 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5123 if (!valid_arglist)
5124 return NULL_RTX;
5126 if ((alloca_for_var
5127 && warn_vla_limit >= HOST_WIDE_INT_MAX
5128 && warn_alloc_size_limit < warn_vla_limit)
5129 || (!alloca_for_var
5130 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5131 && warn_alloc_size_limit < warn_alloca_limit
5134 /* -Walloca-larger-than and -Wvla-larger-than settings of
5135 less than HOST_WIDE_INT_MAX override the more general
5136 -Walloc-size-larger-than so unless either of the former
5137 options is smaller than the last one (which would imply
5138 that the call was already checked), check the alloca
5139 arguments for overflow. */
5140 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5141 int idx[] = { 0, -1 };
5142 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5145 /* Compute the argument. */
5146 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5148 /* Compute the alignment. */
5149 align = (fcode == BUILT_IN_ALLOCA
5150 ? BIGGEST_ALIGNMENT
5151 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5153 /* Compute the maximum size. */
5154 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5155 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5156 : -1);
5158 /* Allocate the desired space. If the allocation stems from the declaration
5159 of a variable-sized object, it cannot accumulate. */
5160 result
5161 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5162 result = convert_memory_address (ptr_mode, result);
5164 return result;
5167 /* Emit a call to __asan_allocas_unpoison in EXP. Add to the second argument
5168 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5169 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5170 handle_builtin_stack_restore function. */
5172 static rtx
5173 expand_asan_emit_allocas_unpoison (tree exp)
5175 tree arg0 = CALL_EXPR_ARG (exp, 0);
5176 tree arg1 = CALL_EXPR_ARG (exp, 1);
5177 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5178 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5179 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5180 stack_pointer_rtx, NULL_RTX, 0,
5181 OPTAB_LIB_WIDEN);
5182 off = convert_modes (ptr_mode, Pmode, off, 0);
5183 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5184 OPTAB_LIB_WIDEN);
5185 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5186 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5187 top, ptr_mode, bot, ptr_mode);
5188 return ret;
5191 /* Expand a call to bswap builtin in EXP.
5192 Return NULL_RTX if a normal call should be emitted rather than expanding the
5193 function in-line. If convenient, the result should be placed in TARGET.
5194 SUBTARGET may be used as the target for computing one of EXP's operands. */
5196 static rtx
5197 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5198 rtx subtarget)
5200 tree arg;
5201 rtx op0;
5203 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5204 return NULL_RTX;
5206 arg = CALL_EXPR_ARG (exp, 0);
5207 op0 = expand_expr (arg,
5208 subtarget && GET_MODE (subtarget) == target_mode
5209 ? subtarget : NULL_RTX,
5210 target_mode, EXPAND_NORMAL);
5211 if (GET_MODE (op0) != target_mode)
5212 op0 = convert_to_mode (target_mode, op0, 1);
5214 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5216 gcc_assert (target);
5218 return convert_to_mode (target_mode, target, 1);
5221 /* Expand a call to a unary builtin in EXP.
5222 Return NULL_RTX if a normal call should be emitted rather than expanding the
5223 function in-line. If convenient, the result should be placed in TARGET.
5224 SUBTARGET may be used as the target for computing one of EXP's operands. */
5226 static rtx
5227 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5228 rtx subtarget, optab op_optab)
5230 rtx op0;
5232 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5233 return NULL_RTX;
5235 /* Compute the argument. */
5236 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5237 (subtarget
5238 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5239 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5240 VOIDmode, EXPAND_NORMAL);
5241 /* Compute op, into TARGET if possible.
5242 Set TARGET to wherever the result comes back. */
5243 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5244 op_optab, op0, target, op_optab != clrsb_optab);
5245 gcc_assert (target);
5247 return convert_to_mode (target_mode, target, 0);
5250 /* Expand a call to __builtin_expect. We just return our argument
5251 as the builtin_expect semantics should already have been applied
5252 by the tree branch prediction pass. */
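/* For example (illustrative only; handle_error is a placeholder):

     if (__builtin_expect (ptr == NULL, 0))
       handle_error ();

   By this point the "unlikely" hint has already been consumed by branch
   prediction, so only the bare PTR == NULL test remains to expand.  */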
5254 static rtx
5255 expand_builtin_expect (tree exp, rtx target)
5257 tree arg;
5259 if (call_expr_nargs (exp) < 2)
5260 return const0_rtx;
5261 arg = CALL_EXPR_ARG (exp, 0);
5263 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5264 /* When guessing was done, the hints should be already stripped away. */
5265 gcc_assert (!flag_guess_branch_prob
5266 || optimize == 0 || seen_error ());
5267 return target;
5270 /* Expand a call to __builtin_expect_with_probability. We just return our
5271 argument as the builtin_expect semantics should already have been applied
5272 by the tree branch prediction pass. */
5274 static rtx
5275 expand_builtin_expect_with_probability (tree exp, rtx target)
5277 tree arg;
5279 if (call_expr_nargs (exp) < 3)
5280 return const0_rtx;
5281 arg = CALL_EXPR_ARG (exp, 0);
5283 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5284 /* When guessing was done, the hints should be already stripped away. */
5285 gcc_assert (!flag_guess_branch_prob
5286 || optimize == 0 || seen_error ());
5287 return target;
5291 /* Expand a call to __builtin_assume_aligned. We just return our first
5292 argument as the builtin_assume_aligned semantics should already have
5293 been applied by CCP. */
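/* For example (illustrative only):

     double *q = (double *) __builtin_assume_aligned (p, 16);

   CCP has already used the 16-byte alignment hint, so expanding the
   call reduces to returning the first argument.  */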
5295 static rtx
5296 expand_builtin_assume_aligned (tree exp, rtx target)
5298 if (call_expr_nargs (exp) < 2)
5299 return const0_rtx;
5300 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5301 EXPAND_NORMAL);
5302 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5303 && (call_expr_nargs (exp) < 3
5304 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5305 return target;
5308 void
5309 expand_builtin_trap (void)
5311 if (targetm.have_trap ())
5313 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5314 /* For trap insns when not accumulating outgoing args force
5315 REG_ARGS_SIZE note to prevent crossjumping of calls with
5316 different args sizes. */
5317 if (!ACCUMULATE_OUTGOING_ARGS)
5318 add_args_size_note (insn, stack_pointer_delta);
5320 else
5322 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5323 tree call_expr = build_call_expr (fn, 0);
5324 expand_call (call_expr, NULL_RTX, false);
5327 emit_barrier ();
5330 /* Expand a call to __builtin_unreachable. We do nothing except emit
5331 a barrier saying that control flow will not pass here.
5333 It is the responsibility of the program being compiled to ensure
5334 that control flow never reaches __builtin_unreachable. */
5335 static void
5336 expand_builtin_unreachable (void)
5338 emit_barrier ();
5341 /* Expand EXP, a call to fabs, fabsf or fabsl.
5342 Return NULL_RTX if a normal call should be emitted rather than expanding
5343 the function inline. If convenient, the result should be placed
5344 in TARGET. SUBTARGET may be used as the target for computing
5345 the operand. */
5347 static rtx
5348 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5350 machine_mode mode;
5351 tree arg;
5352 rtx op0;
5354 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5355 return NULL_RTX;
5357 arg = CALL_EXPR_ARG (exp, 0);
5358 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5359 mode = TYPE_MODE (TREE_TYPE (arg));
5360 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5361 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5364 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5365 Return NULL if a normal call should be emitted rather than expanding the
5366 function inline. If convenient, the result should be placed in TARGET.
5367 SUBTARGET may be used as the target for computing the operand. */
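/* For example (illustrative only):

     copysign (3.0, -0.5)   yields   -3.0

   i.e. the magnitude of the first operand combined with the sign of
   the second.  */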
5369 static rtx
5370 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5372 rtx op0, op1;
5373 tree arg;
5375 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5376 return NULL_RTX;
5378 arg = CALL_EXPR_ARG (exp, 0);
5379 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5381 arg = CALL_EXPR_ARG (exp, 1);
5382 op1 = expand_normal (arg);
5384 return expand_copysign (op0, op1, target);
5387 /* Expand a call to __builtin___clear_cache. */
5389 static rtx
5390 expand_builtin___clear_cache (tree exp)
5392 if (!targetm.code_for_clear_cache)
5394 #ifdef CLEAR_INSN_CACHE
5395 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5396 does something. Just do the default expansion to a call to
5397 __clear_cache(). */
5398 return NULL_RTX;
5399 #else
5400 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5401 does nothing. There is no need to call it. Do nothing. */
5402 return const0_rtx;
5403 #endif /* CLEAR_INSN_CACHE */
5406 /* We have a "clear_cache" insn, and it will handle everything. */
5407 tree begin, end;
5408 rtx begin_rtx, end_rtx;
5410 /* We must not expand to a library call. If we did, any
5411 fallback library function in libgcc that might contain a call to
5412 __builtin___clear_cache() would recurse infinitely. */
5413 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5415 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5416 return const0_rtx;
5419 if (targetm.have_clear_cache ())
5421 struct expand_operand ops[2];
5423 begin = CALL_EXPR_ARG (exp, 0);
5424 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5426 end = CALL_EXPR_ARG (exp, 1);
5427 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5429 create_address_operand (&ops[0], begin_rtx);
5430 create_address_operand (&ops[1], end_rtx);
5431 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5432 return const0_rtx;
5434 return const0_rtx;
5437 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
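/* The expansion below is the usual round-up-to-alignment sequence;
   e.g. for a TRAMPOLINE_ALIGNMENT of 64 bits (illustrative only):

     rounded = (tramp + 7) & -8;

   which is exactly the PLUS/AND pair emitted by the code.  */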
5439 static rtx
5440 round_trampoline_addr (rtx tramp)
5442 rtx temp, addend, mask;
5444 /* If we don't need too much alignment, we'll have been guaranteed
5445 proper alignment by get_trampoline_type. */
5446 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5447 return tramp;
5449 /* Round address up to desired boundary. */
5450 temp = gen_reg_rtx (Pmode);
5451 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5452 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5454 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5455 temp, 0, OPTAB_LIB_WIDEN);
5456 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5457 temp, 0, OPTAB_LIB_WIDEN);
5459 return tramp;
5462 static rtx
5463 expand_builtin_init_trampoline (tree exp, bool onstack)
5465 tree t_tramp, t_func, t_chain;
5466 rtx m_tramp, r_tramp, r_chain, tmp;
5468 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5469 POINTER_TYPE, VOID_TYPE))
5470 return NULL_RTX;
5472 t_tramp = CALL_EXPR_ARG (exp, 0);
5473 t_func = CALL_EXPR_ARG (exp, 1);
5474 t_chain = CALL_EXPR_ARG (exp, 2);
5476 r_tramp = expand_normal (t_tramp);
5477 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5478 MEM_NOTRAP_P (m_tramp) = 1;
5480 /* If ONSTACK, the TRAMP argument should be the address of a field
5481 within the local function's FRAME decl. Either way, let's see if
5482 we can fill in the MEM_ATTRs for this memory. */
5483 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5484 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5486 /* Creator of a heap trampoline is responsible for making sure the
5487 address is aligned to at least STACK_BOUNDARY. Normally malloc
5488 will ensure this anyhow. */
5489 tmp = round_trampoline_addr (r_tramp);
5490 if (tmp != r_tramp)
5492 m_tramp = change_address (m_tramp, BLKmode, tmp);
5493 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5494 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5497 /* The FUNC argument should be the address of the nested function.
5498 Extract the actual function decl to pass to the hook. */
5499 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5500 t_func = TREE_OPERAND (t_func, 0);
5501 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5503 r_chain = expand_normal (t_chain);
5505 /* Generate insns to initialize the trampoline. */
5506 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5508 if (onstack)
5510 trampolines_created = 1;
5512 if (targetm.calls.custom_function_descriptors != 0)
5513 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5514 "trampoline generated for nested function %qD", t_func);
5517 return const0_rtx;
5520 static rtx
5521 expand_builtin_adjust_trampoline (tree exp)
5523 rtx tramp;
5525 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5526 return NULL_RTX;
5528 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5529 tramp = round_trampoline_addr (tramp);
5530 if (targetm.calls.trampoline_adjust_address)
5531 tramp = targetm.calls.trampoline_adjust_address (tramp);
5533 return tramp;
5536 /* Expand a call to the builtin descriptor initialization routine.
5537 A descriptor is made up of a couple of pointers to the static
5538 chain and the code entry in this order. */
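/* That is, the descriptor built below is laid out like (illustrative
   only):

     struct descriptor { void *static_chain; void *entry_point; };

   with the static chain stored at offset 0 and the code address at
   offset POINTER_SIZE / BITS_PER_UNIT, matching the two stores below.  */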
5540 static rtx
5541 expand_builtin_init_descriptor (tree exp)
5543 tree t_descr, t_func, t_chain;
5544 rtx m_descr, r_descr, r_func, r_chain;
5546 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5547 VOID_TYPE))
5548 return NULL_RTX;
5550 t_descr = CALL_EXPR_ARG (exp, 0);
5551 t_func = CALL_EXPR_ARG (exp, 1);
5552 t_chain = CALL_EXPR_ARG (exp, 2);
5554 r_descr = expand_normal (t_descr);
5555 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5556 MEM_NOTRAP_P (m_descr) = 1;
5558 r_func = expand_normal (t_func);
5559 r_chain = expand_normal (t_chain);
5561 /* Generate insns to initialize the descriptor. */
5562 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5563 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5564 POINTER_SIZE / BITS_PER_UNIT), r_func);
5566 return const0_rtx;
5569 /* Expand a call to the builtin descriptor adjustment routine. */
5571 static rtx
5572 expand_builtin_adjust_descriptor (tree exp)
5574 rtx tramp;
5576 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5577 return NULL_RTX;
5579 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5581 /* Unalign the descriptor to allow runtime identification. */
5582 tramp = plus_constant (ptr_mode, tramp,
5583 targetm.calls.custom_function_descriptors);
5585 return force_operand (tramp, NULL_RTX);
5588 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5589 function. The function first checks whether the back end provides
5590 an insn to implement signbit for the respective mode. If not, it
5591 checks whether the floating point format of the value is such that
5592 the sign bit can be extracted; if not, the call is expanded as ARG < 0.0.
5593 EXP is the expression that is a call to the builtin function; if
5594 convenient, the result should be placed in TARGET. */
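/* As a concrete example (illustrative only): for IEEE double, the sign
   bit is the most significant of the 64 format bits, so on a 64-bit
   target the extraction below behaves roughly like

     (int) (bits >> 63) & 1

   where BITS stands for the representation of the argument viewed as
   an unsigned integer.  */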
5595 static rtx
5596 expand_builtin_signbit (tree exp, rtx target)
5598 const struct real_format *fmt;
5599 scalar_float_mode fmode;
5600 scalar_int_mode rmode, imode;
5601 tree arg;
5602 int word, bitpos;
5603 enum insn_code icode;
5604 rtx temp;
5605 location_t loc = EXPR_LOCATION (exp);
5607 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5608 return NULL_RTX;
5610 arg = CALL_EXPR_ARG (exp, 0);
5611 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5612 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5613 fmt = REAL_MODE_FORMAT (fmode);
5615 arg = builtin_save_expr (arg);
5617 /* Expand the argument yielding a RTX expression. */
5618 temp = expand_normal (arg);
5620 /* Check if the back end provides an insn that handles signbit for the
5621 argument's mode. */
5622 icode = optab_handler (signbit_optab, fmode);
5623 if (icode != CODE_FOR_nothing)
5625 rtx_insn *last = get_last_insn ();
5626 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5627 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5628 return target;
5629 delete_insns_since (last);
5632 /* For floating point formats without a sign bit, implement signbit
5633 as "ARG < 0.0". */
5634 bitpos = fmt->signbit_ro;
5635 if (bitpos < 0)
5637 /* But we can't do this if the format supports signed zero. */
5638 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5640 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5641 build_real (TREE_TYPE (arg), dconst0));
5642 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5645 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5647 imode = int_mode_for_mode (fmode).require ();
5648 temp = gen_lowpart (imode, temp);
5650 else
5652 imode = word_mode;
5653 /* Handle targets with different FP word orders. */
5654 if (FLOAT_WORDS_BIG_ENDIAN)
5655 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5656 else
5657 word = bitpos / BITS_PER_WORD;
5658 temp = operand_subword_force (temp, word, fmode);
5659 bitpos = bitpos % BITS_PER_WORD;
5662 /* Force the intermediate word_mode (or narrower) result into a
5663 register. This avoids attempting to create paradoxical SUBREGs
5664 of floating point modes below. */
5665 temp = force_reg (imode, temp);
5667 /* If the bitpos is within the "result mode" lowpart, the operation
5668 can be implemented with a single bitwise AND. Otherwise, we need
5669 a right shift and an AND. */
5671 if (bitpos < GET_MODE_BITSIZE (rmode))
5673 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5675 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5676 temp = gen_lowpart (rmode, temp);
5677 temp = expand_binop (rmode, and_optab, temp,
5678 immed_wide_int_const (mask, rmode),
5679 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5681 else
5683 /* Perform a logical right shift to place the signbit in the least
5684 significant bit, then truncate the result to the desired mode
5685 and mask just this bit. */
5686 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5687 temp = gen_lowpart (rmode, temp);
5688 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5689 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5692 return temp;
5695 /* Expand fork or exec calls. TARGET is the desired target of the
5696 call. EXP is the call. FN is the
5697 identifier of the actual function. IGNORE is nonzero if the
5698 value is to be ignored. */
5700 static rtx
5701 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5703 tree id, decl;
5704 tree call;
5706 /* If we are not profiling, just call the function. */
5707 if (!profile_arc_flag)
5708 return NULL_RTX;
5710 /* Otherwise call the wrapper. This should be equivalent to the rest of the
5711 compiler, so the code does not diverge, and the wrapper may run the
5712 code necessary to keep the profiling sane. */
5714 switch (DECL_FUNCTION_CODE (fn))
5716 case BUILT_IN_FORK:
5717 id = get_identifier ("__gcov_fork");
5718 break;
5720 case BUILT_IN_EXECL:
5721 id = get_identifier ("__gcov_execl");
5722 break;
5724 case BUILT_IN_EXECV:
5725 id = get_identifier ("__gcov_execv");
5726 break;
5728 case BUILT_IN_EXECLP:
5729 id = get_identifier ("__gcov_execlp");
5730 break;
5732 case BUILT_IN_EXECLE:
5733 id = get_identifier ("__gcov_execle");
5734 break;
5736 case BUILT_IN_EXECVP:
5737 id = get_identifier ("__gcov_execvp");
5738 break;
5740 case BUILT_IN_EXECVE:
5741 id = get_identifier ("__gcov_execve");
5742 break;
5744 default:
5745 gcc_unreachable ();
5748 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5749 FUNCTION_DECL, id, TREE_TYPE (fn));
5750 DECL_EXTERNAL (decl) = 1;
5751 TREE_PUBLIC (decl) = 1;
5752 DECL_ARTIFICIAL (decl) = 1;
5753 TREE_NOTHROW (decl) = 1;
5754 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5755 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5756 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5757 return expand_call (call, target, ignore);
5762 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5763 the pointer in these functions is void*, the tree optimizers may remove
5764 casts. The mode computed in expand_builtin isn't reliable either, due
5765 to __sync_bool_compare_and_swap.
5767 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5768 group of builtins. This gives us log2 of the mode size. */
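/* For example (illustrative only): for __sync_fetch_and_add_4,
   FCODE_DIFF is 2, so the size is BITS_PER_UNIT << 2 = 32 bits and
   the mode is SImode on a target with 8-bit units.  */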
5770 static inline machine_mode
5771 get_builtin_sync_mode (int fcode_diff)
5773 /* The size is not negotiable, so ask not to get BLKmode in return
5774 if the target indicates that a smaller size would be better. */
5775 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5778 /* Expand the memory expression LOC and return the appropriate memory operand
5779 for the builtin_sync operations. */
5781 static rtx
5782 get_builtin_sync_mem (tree loc, machine_mode mode)
5784 rtx addr, mem;
5786 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5787 addr = convert_memory_address (Pmode, addr);
5789 /* Note that we explicitly do not want any alias information for this
5790 memory, so that we kill all other live memories. Otherwise we don't
5791 satisfy the full barrier semantics of the intrinsic. */
5792 mem = validize_mem (gen_rtx_MEM (mode, addr));
5794 /* The alignment needs to be at least that of the mode. */
5795 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5796 get_pointer_alignment (loc)));
5797 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5798 MEM_VOLATILE_P (mem) = 1;
5800 return mem;
5803 /* Make sure an argument is in the right mode.
5804 EXP is the tree argument.
5805 MODE is the mode it should be in. */
5807 static rtx
5808 expand_expr_force_mode (tree exp, machine_mode mode)
5810 rtx val;
5811 machine_mode old_mode;
5813 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5814 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5815 of CONST_INTs, where we know the old_mode only from the call argument. */
5817 old_mode = GET_MODE (val);
5818 if (old_mode == VOIDmode)
5819 old_mode = TYPE_MODE (TREE_TYPE (exp));
5820 val = convert_modes (mode, old_mode, val, 1);
5821 return val;
5825 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5826 EXP is the CALL_EXPR. CODE is the rtx code
5827 that corresponds to the arithmetic or logical operation from the name;
5828 an exception here is that NOT actually means NAND. TARGET is an optional
5829 place for us to store the results; AFTER is true if this is the
5830 xxx_and_fetch form, which returns the post-operation value. */
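/* For example (illustrative only):

     old_val = __sync_fetch_and_add (&x, n);    AFTER is false
     new_val = __sync_add_and_fetch (&x, n);    AFTER is true

   both with CODE == PLUS; for the NAND forms the update performed
   since GCC 4.4 is x = ~(x & n).  */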
5832 static rtx
5833 expand_builtin_sync_operation (machine_mode mode, tree exp,
5834 enum rtx_code code, bool after,
5835 rtx target)
5837 rtx val, mem;
5838 location_t loc = EXPR_LOCATION (exp);
5840 if (code == NOT && warn_sync_nand)
5842 tree fndecl = get_callee_fndecl (exp);
5843 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5845 static bool warned_f_a_n, warned_n_a_f;
5847 switch (fcode)
5849 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5850 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5851 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5852 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5853 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5854 if (warned_f_a_n)
5855 break;
5857 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5858 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5859 warned_f_a_n = true;
5860 break;
5862 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5863 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5864 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5865 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5866 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5867 if (warned_n_a_f)
5868 break;
5870 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5871 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5872 warned_n_a_f = true;
5873 break;
5875 default:
5876 gcc_unreachable ();
5880 /* Expand the operands. */
5881 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5882 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5884 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5885 after);
5888 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5889 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5890 true if this is the boolean form. TARGET is a place for us to store the
5891 results; this is NOT optional if IS_BOOL is true. */
5893 static rtx
5894 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5895 bool is_bool, rtx target)
5897 rtx old_val, new_val, mem;
5898 rtx *pbool, *poval;
5900 /* Expand the operands. */
5901 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5902 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5903 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5905 pbool = poval = NULL;
5906 if (target != const0_rtx)
5908 if (is_bool)
5909 pbool = &target;
5910 else
5911 poval = &target;
5913 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5914 false, MEMMODEL_SYNC_SEQ_CST,
5915 MEMMODEL_SYNC_SEQ_CST))
5916 return NULL_RTX;
5918 return target;
5921 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5922 general form is actually an atomic exchange, and some targets only
5923 support a reduced form with the second argument being a constant 1.
5924 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5925 the results. */
5927 static rtx
5928 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5929 rtx target)
5931 rtx val, mem;
5933 /* Expand the operands. */
5934 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5935 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5937 return expand_sync_lock_test_and_set (target, mem, val);
5940 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5942 static void
5943 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5945 rtx mem;
5947 /* Expand the operands. */
5948 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5950 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5953 /* Given an integer representing an ``enum memmodel'', verify its
5954 correctness and return the memory model enum. */
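/* For example (illustrative only), user code normally passes one of
   the predefined constants:

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);

   where __ATOMIC_RELAXED, __ATOMIC_CONSUME, __ATOMIC_ACQUIRE,
   __ATOMIC_RELEASE, __ATOMIC_ACQ_REL and __ATOMIC_SEQ_CST are the
   values 0 through 5 that this function validates.  */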
5956 static enum memmodel
5957 get_memmodel (tree exp)
5959 rtx op;
5960 unsigned HOST_WIDE_INT val;
5961 source_location loc
5962 = expansion_point_location_if_in_system_header (input_location);
5964 /* If the parameter is not a constant, it's a run time value so we'll just
5965 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5966 if (TREE_CODE (exp) != INTEGER_CST)
5967 return MEMMODEL_SEQ_CST;
5969 op = expand_normal (exp);
5971 val = INTVAL (op);
5972 if (targetm.memmodel_check)
5973 val = targetm.memmodel_check (val);
5974 else if (val & ~MEMMODEL_MASK)
5976 warning_at (loc, OPT_Winvalid_memory_model,
5977 "unknown architecture specifier in memory model to builtin");
5978 return MEMMODEL_SEQ_CST;
5981 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5982 if (memmodel_base (val) >= MEMMODEL_LAST)
5984 warning_at (loc, OPT_Winvalid_memory_model,
5985 "invalid memory model argument to builtin");
5986 return MEMMODEL_SEQ_CST;
5989 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5990 be conservative and promote consume to acquire. */
5991 if (val == MEMMODEL_CONSUME)
5992 val = MEMMODEL_ACQUIRE;
5994 return (enum memmodel) val;
5997 /* Expand the __atomic_exchange intrinsic:
5998 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5999 EXP is the CALL_EXPR.
6000 TARGET is an optional place for us to store the results. */
6002 static rtx
6003 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6005 rtx val, mem;
6006 enum memmodel model;
6008 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6010 if (!flag_inline_atomics)
6011 return NULL_RTX;
6013 /* Expand the operands. */
6014 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6015 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6017 return expand_atomic_exchange (target, mem, val, model);
6020 /* Expand the __atomic_compare_exchange intrinsic:
6021 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6022 TYPE desired, BOOL weak,
6023 enum memmodel success,
6024 enum memmodel failure)
6025 EXP is the CALL_EXPR.
6026 TARGET is an optional place for us to store the results. */
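/* For example (illustrative only):

     bool ok = __atomic_compare_exchange_n (&obj, &expected, desired, false,
                                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   stores DESIRED into OBJ iff OBJ still equals EXPECTED; on failure the
   current value of OBJ is written back into EXPECTED, which is what the
   conditional store emitted below implements.  */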
6028 static rtx
6029 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6030 rtx target)
6032 rtx expect, desired, mem, oldval;
6033 rtx_code_label *label;
6034 enum memmodel success, failure;
6035 tree weak;
6036 bool is_weak;
6037 source_location loc
6038 = expansion_point_location_if_in_system_header (input_location);
6040 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6041 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6043 if (failure > success)
6045 warning_at (loc, OPT_Winvalid_memory_model,
6046 "failure memory model cannot be stronger than success "
6047 "memory model for %<__atomic_compare_exchange%>");
6048 success = MEMMODEL_SEQ_CST;
6051 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6053 warning_at (loc, OPT_Winvalid_memory_model,
6054 "invalid failure memory model for "
6055 "%<__atomic_compare_exchange%>");
6056 failure = MEMMODEL_SEQ_CST;
6057 success = MEMMODEL_SEQ_CST;
6061 if (!flag_inline_atomics)
6062 return NULL_RTX;
6064 /* Expand the operands. */
6065 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6067 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6068 expect = convert_memory_address (Pmode, expect);
6069 expect = gen_rtx_MEM (mode, expect);
6070 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6072 weak = CALL_EXPR_ARG (exp, 3);
6073 is_weak = false;
6074 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6075 is_weak = true;
6077 if (target == const0_rtx)
6078 target = NULL;
6080 /* Lest the rtl backend create a race condition with an improper store
6081 to memory, always create a new pseudo for OLDVAL. */
6082 oldval = NULL;
6084 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6085 is_weak, success, failure))
6086 return NULL_RTX;
6088 /* Conditionally store back to EXPECT, lest we create a race condition
6089 with an improper store to memory. */
6090 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6091 the normal case where EXPECT is totally private, i.e. a register. At
6092 which point the store can be unconditional. */
6093 label = gen_label_rtx ();
6094 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6095 GET_MODE (target), 1, label);
6096 emit_move_insn (expect, oldval);
6097 emit_label (label);
6099 return target;
6102 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6103 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6104 call. The weak parameter must be dropped to match the expected parameter
6105 list, and the EXPECTED argument changed from a value to a pointer to a memory
6106 slot. */
6108 static void
6109 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6111 unsigned int z;
6112 vec<tree, va_gc> *vec;
6114 vec_alloc (vec, 5);
6115 vec->quick_push (gimple_call_arg (call, 0));
6116 tree expected = gimple_call_arg (call, 1);
6117 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6118 TREE_TYPE (expected));
6119 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6120 if (expd != x)
6121 emit_move_insn (x, expd);
6122 tree v = make_tree (TREE_TYPE (expected), x);
6123 vec->quick_push (build1 (ADDR_EXPR,
6124 build_pointer_type (TREE_TYPE (expected)), v));
6125 vec->quick_push (gimple_call_arg (call, 2));
6126 /* Skip the boolean weak parameter. */
6127 for (z = 4; z < 6; z++)
6128 vec->quick_push (gimple_call_arg (call, z));
6129 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6130 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6131 gcc_assert (bytes_log2 < 5);
6132 built_in_function fncode
6133 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6134 + bytes_log2);
6135 tree fndecl = builtin_decl_explicit (fncode);
6136 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6137 fndecl);
6138 tree exp = build_call_vec (boolean_type_node, fn, vec);
6139 tree lhs = gimple_call_lhs (call);
6140 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6141 if (lhs)
6143 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6144 if (GET_MODE (boolret) != mode)
6145 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6146 x = force_reg (mode, x);
6147 write_complex_part (target, boolret, true);
6148 write_complex_part (target, x, false);
6152 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6154 void
6155 expand_ifn_atomic_compare_exchange (gcall *call)
6157 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6158 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6159 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6160 rtx expect, desired, mem, oldval, boolret;
6161 enum memmodel success, failure;
6162 tree lhs;
6163 bool is_weak;
6164 source_location loc
6165 = expansion_point_location_if_in_system_header (gimple_location (call));
6167 success = get_memmodel (gimple_call_arg (call, 4));
6168 failure = get_memmodel (gimple_call_arg (call, 5));
6170 if (failure > success)
6172 warning_at (loc, OPT_Winvalid_memory_model,
6173 "failure memory model cannot be stronger than success "
6174 "memory model for %<__atomic_compare_exchange%>");
6175 success = MEMMODEL_SEQ_CST;
6178 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6180 warning_at (loc, OPT_Winvalid_memory_model,
6181 "invalid failure memory model for "
6182 "%<__atomic_compare_exchange%>");
6183 failure = MEMMODEL_SEQ_CST;
6184 success = MEMMODEL_SEQ_CST;
6187 if (!flag_inline_atomics)
6189 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6190 return;
6193 /* Expand the operands. */
6194 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6196 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6197 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6199 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6201 boolret = NULL;
6202 oldval = NULL;
6204 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6205 is_weak, success, failure))
6207 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6208 return;
6211 lhs = gimple_call_lhs (call);
6212 if (lhs)
6214 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6215 if (GET_MODE (boolret) != mode)
6216 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6217 write_complex_part (target, boolret, true);
6218 write_complex_part (target, oldval, false);
6222 /* Expand the __atomic_load intrinsic:
6223 TYPE __atomic_load (TYPE *object, enum memmodel)
6224 EXP is the CALL_EXPR.
6225 TARGET is an optional place for us to store the results. */
6227 static rtx
6228 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6230 rtx mem;
6231 enum memmodel model;
6233 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6234 if (is_mm_release (model) || is_mm_acq_rel (model))
6236 source_location loc
6237 = expansion_point_location_if_in_system_header (input_location);
6238 warning_at (loc, OPT_Winvalid_memory_model,
6239 "invalid memory model for %<__atomic_load%>");
6240 model = MEMMODEL_SEQ_CST;
6243 if (!flag_inline_atomics)
6244 return NULL_RTX;
6246 /* Expand the operand. */
6247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6249 return expand_atomic_load (target, mem, model);
6253 /* Expand the __atomic_store intrinsic:
6254 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6255 EXP is the CALL_EXPR; the store produces no result, so there is no
6256 TARGET parameter. */
6258 static rtx
6259 expand_builtin_atomic_store (machine_mode mode, tree exp)
6261 rtx mem, val;
6262 enum memmodel model;
6264 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6265 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6266 || is_mm_release (model)))
6268 source_location loc
6269 = expansion_point_location_if_in_system_header (input_location);
6270 warning_at (loc, OPT_Winvalid_memory_model,
6271 "invalid memory model for %<__atomic_store%>");
6272 model = MEMMODEL_SEQ_CST;
6275 if (!flag_inline_atomics)
6276 return NULL_RTX;
6278 /* Expand the operands. */
6279 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6280 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6282 return expand_atomic_store (mem, val, model, false);
6285 /* Expand the __atomic_fetch_XXX intrinsic:
6286 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6287 EXP is the CALL_EXPR.
6288 TARGET is an optional place for us to store the results.
6289 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6290 FETCH_AFTER is true if returning the result of the operation.
6291 FETCH_AFTER is false if returning the value before the operation.
6292 IGNORE is true if the result is not used.
6293 EXT_CALL is the correct builtin for an external call if this cannot be
6294 resolved to an instruction sequence. */
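/* For example (illustrative only): a non-inlined __atomic_add_fetch is
   re-issued as a call to __atomic_fetch_add (the EXT_CALL), which
   returns the pre-operation value, and the code below then corrects it:

     ret = ret + val;        for the generic case, or
     ret = ~(ret & val);     for the NAND (NOT) case,

   so the caller still observes the post-operation result.  */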
6296 static rtx
6297 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6298 enum rtx_code code, bool fetch_after,
6299 bool ignore, enum built_in_function ext_call)
6301 rtx val, mem, ret;
6302 enum memmodel model;
6303 tree fndecl;
6304 tree addr;
6306 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6308 /* Expand the operands. */
6309 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6310 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6312 /* Only try generating instructions if inlining is turned on. */
6313 if (flag_inline_atomics)
6315 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6316 if (ret)
6317 return ret;
6320 /* Return if a different routine isn't needed for the library call. */
6321 if (ext_call == BUILT_IN_NONE)
6322 return NULL_RTX;
6324 /* Change the call to the specified function. */
6325 fndecl = get_callee_fndecl (exp);
6326 addr = CALL_EXPR_FN (exp);
6327 STRIP_NOPS (addr);
6329 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6330 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6332 /* If we will emit code after the call, the call cannot be a tail call.
6333 If it is emitted as a tail call, a barrier is emitted after it, and
6334 then all trailing code is removed. */
6335 if (!ignore)
6336 CALL_EXPR_TAILCALL (exp) = 0;
6338 /* Expand the call here so we can emit trailing code. */
6339 ret = expand_call (exp, target, ignore);
6341 /* Replace the original function just in case it matters. */
6342 TREE_OPERAND (addr, 0) = fndecl;
6344 /* Then issue the arithmetic correction to return the right result. */
6345 if (!ignore)
6347 if (code == NOT)
6349 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6350 OPTAB_LIB_WIDEN);
6351 ret = expand_simple_unop (mode, NOT, ret, target, true);
6353 else
6354 ret = expand_simple_binop (mode, code, ret, val, target, true,
6355 OPTAB_LIB_WIDEN);
6357 return ret;
6360 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
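/* These internal functions match source patterns such as (illustrative
   only):

     bool b = (__atomic_fetch_or (&word, 1u << bit, model) >> bit) & 1;

   i.e. atomically set, complement or reset a single bit and test its
   previous value, for which many targets have one instruction.  */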
6362 void
6363 expand_ifn_atomic_bit_test_and (gcall *call)
6365 tree ptr = gimple_call_arg (call, 0);
6366 tree bit = gimple_call_arg (call, 1);
6367 tree flag = gimple_call_arg (call, 2);
6368 tree lhs = gimple_call_lhs (call);
6369 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6370 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6371 enum rtx_code code;
6372 optab optab;
6373 struct expand_operand ops[5];
6375 gcc_assert (flag_inline_atomics);
6377 if (gimple_call_num_args (call) == 4)
6378 model = get_memmodel (gimple_call_arg (call, 3));
6380 rtx mem = get_builtin_sync_mem (ptr, mode);
6381 rtx val = expand_expr_force_mode (bit, mode);
6383 switch (gimple_call_internal_fn (call))
6385 case IFN_ATOMIC_BIT_TEST_AND_SET:
6386 code = IOR;
6387 optab = atomic_bit_test_and_set_optab;
6388 break;
6389 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6390 code = XOR;
6391 optab = atomic_bit_test_and_complement_optab;
6392 break;
6393 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6394 code = AND;
6395 optab = atomic_bit_test_and_reset_optab;
6396 break;
6397 default:
6398 gcc_unreachable ();
6401 if (lhs == NULL_TREE)
6403 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6404 val, NULL_RTX, true, OPTAB_DIRECT);
6405 if (code == AND)
6406 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6407 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6408 return;
6411 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6412 enum insn_code icode = direct_optab_handler (optab, mode);
6413 gcc_assert (icode != CODE_FOR_nothing);
6414 create_output_operand (&ops[0], target, mode);
6415 create_fixed_operand (&ops[1], mem);
6416 create_convert_operand_to (&ops[2], val, mode, true);
6417 create_integer_operand (&ops[3], model);
6418 create_integer_operand (&ops[4], integer_onep (flag));
6419 if (maybe_expand_insn (icode, 5, ops))
6420 return;
6422 rtx bitval = val;
6423 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6424 val, NULL_RTX, true, OPTAB_DIRECT);
6425 rtx maskval = val;
6426 if (code == AND)
6427 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6428 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6429 code, model, false);
6430 if (integer_onep (flag))
6432 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6433 NULL_RTX, true, OPTAB_DIRECT);
6434 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6435 true, OPTAB_DIRECT);
6437 else
6438 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6439 OPTAB_DIRECT);
6440 if (result != target)
6441 emit_move_insn (target, result);
6444 /* Expand an atomic clear operation.
6445 void _atomic_clear (BOOL *obj, enum memmodel)
6446 EXP is the call expression. */
6448 static rtx
6449 expand_builtin_atomic_clear (tree exp)
6451 machine_mode mode;
6452 rtx mem, ret;
6453 enum memmodel model;
6455 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6456 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6457 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6459 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6461 source_location loc
6462 = expansion_point_location_if_in_system_header (input_location);
6463 warning_at (loc, OPT_Winvalid_memory_model,
6464 "invalid memory model for %<__atomic_store%>");
6465 model = MEMMODEL_SEQ_CST;
6468 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6469 Failing that, a store is issued by __atomic_store. The only way this can
6470 fail is if the bool type is larger than a word size. Unlikely, but
6471 handle it anyway for completeness. Assume a single threaded model since
6472 there is no atomic support in this case, and no barriers are required. */
6473 ret = expand_atomic_store (mem, const0_rtx, model, true);
6474 if (!ret)
6475 emit_move_insn (mem, const0_rtx);
6476 return const0_rtx;
6479 /* Expand an atomic test_and_set operation.
6480 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6481 EXP is the call expression. */
6483 static rtx
6484 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6486 rtx mem;
6487 enum memmodel model;
6488 machine_mode mode;
6490 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6491 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6492 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6494 return expand_atomic_test_and_set (target, mem, model);
6498 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6499 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
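/* For example (illustrative only):

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true on targets that can both compare-and-swap and
   atomically load a naturally aligned 32-bit integer, and to false
   otherwise.  */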
6501 static tree
6502 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6504 int size;
6505 machine_mode mode;
6506 unsigned int mode_align, type_align;
6508 if (TREE_CODE (arg0) != INTEGER_CST)
6509 return NULL_TREE;
6511 /* We need a corresponding integer mode for the access to be lock-free. */
6512 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6513 if (!int_mode_for_size (size, 0).exists (&mode))
6514 return boolean_false_node;
6516 mode_align = GET_MODE_ALIGNMENT (mode);
6518 if (TREE_CODE (arg1) == INTEGER_CST)
6520 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6522 /* Either this argument is null, or it's a fake pointer encoding
6523 the alignment of the object. */
6524 val = least_bit_hwi (val);
6525 val *= BITS_PER_UNIT;
6527 if (val == 0 || mode_align < val)
6528 type_align = mode_align;
6529 else
6530 type_align = val;
6532 else
6534 tree ttype = TREE_TYPE (arg1);
6536 /* This function is usually invoked and folded immediately by the front
6537 end before anything else has a chance to look at it. The pointer
6538 parameter at this point is usually cast to a void *, so check for that
6539 and look past the cast. */
6540 if (CONVERT_EXPR_P (arg1)
6541 && POINTER_TYPE_P (ttype)
6542 && VOID_TYPE_P (TREE_TYPE (ttype))
6543 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6544 arg1 = TREE_OPERAND (arg1, 0);
6546 ttype = TREE_TYPE (arg1);
6547 gcc_assert (POINTER_TYPE_P (ttype));
6549 /* Get the underlying type of the object. */
6550 ttype = TREE_TYPE (ttype);
6551 type_align = TYPE_ALIGN (ttype);
6554 /* If the object has smaller alignment, the lock free routines cannot
6555 be used. */
6556 if (type_align < mode_align)
6557 return boolean_false_node;
6559 /* Check if a compare_and_swap pattern exists for the mode which represents
6560 the required size. The pattern is not allowed to fail, so the existence
6561 of the pattern indicates support is present. Also require that an
6562 atomic load exists for the required size. */
6563 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6564 return boolean_true_node;
6565 else
6566 return boolean_false_node;
6569 /* Return true if the parameters to call EXP represent an object which will
6570 always generate lock free instructions. The first argument represents the
6571 size of the object, and the second parameter is a pointer to the object
6572 itself. If NULL is passed for the object, then the result is based on
6573 typical alignment for an object of the specified size. Otherwise return
6574 false. */
6576 static rtx
6577 expand_builtin_atomic_always_lock_free (tree exp)
6579 tree size;
6580 tree arg0 = CALL_EXPR_ARG (exp, 0);
6581 tree arg1 = CALL_EXPR_ARG (exp, 1);
6583 if (TREE_CODE (arg0) != INTEGER_CST)
6585 error ("non-constant argument 1 to __atomic_always_lock_free");
6586 return const0_rtx;
6589 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6590 if (size == boolean_true_node)
6591 return const1_rtx;
6592 return const0_rtx;
6595 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6596 is lock free on this architecture. */
6598 static tree
6599 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6601 if (!flag_inline_atomics)
6602 return NULL_TREE;
6604 /* If it isn't always lock free, don't generate a result. */
6605 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6606 return boolean_true_node;
6608 return NULL_TREE;
6611 /* Return true if the parameters to call EXP represent an object which will
6612 always generate lock free instructions. The first argument represents the
6613 size of the object, and the second parameter is a pointer to the object
6614 itself. If NULL is passed for the object, then the result is based on
6615 typical alignment for an object of the specified size. Otherwise return
6616 NULL. */
6618 static rtx
6619 expand_builtin_atomic_is_lock_free (tree exp)
6621 tree size;
6622 tree arg0 = CALL_EXPR_ARG (exp, 0);
6623 tree arg1 = CALL_EXPR_ARG (exp, 1);
6625 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6627 error ("non-integer argument 1 to __atomic_is_lock_free");
6628 return NULL_RTX;
6631 if (!flag_inline_atomics)
6632 return NULL_RTX;
6634 /* If the value is known at compile time, return the RTX for it. */
6635 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6636 if (size == boolean_true_node)
6637 return const1_rtx;
6639 return NULL_RTX;
6642 /* Expand the __atomic_thread_fence intrinsic:
6643 void __atomic_thread_fence (enum memmodel)
6644 EXP is the CALL_EXPR. */
6646 static void
6647 expand_builtin_atomic_thread_fence (tree exp)
6649 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6650 expand_mem_thread_fence (model);
6653 /* Expand the __atomic_signal_fence intrinsic:
6654 void __atomic_signal_fence (enum memmodel)
6655 EXP is the CALL_EXPR. */
6657 static void
6658 expand_builtin_atomic_signal_fence (tree exp)
6660 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6661 expand_mem_signal_fence (model);
6664 /* Expand the __sync_synchronize intrinsic. */
6666 static void
6667 expand_builtin_sync_synchronize (void)
6669 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6672 static rtx
6673 expand_builtin_thread_pointer (tree exp, rtx target)
6675 enum insn_code icode;
6676 if (!validate_arglist (exp, VOID_TYPE))
6677 return const0_rtx;
6678 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6679 if (icode != CODE_FOR_nothing)
6681 struct expand_operand op;
6682 /* If the target is not suitable then create a new target. */
6683 if (target == NULL_RTX
6684 || !REG_P (target)
6685 || GET_MODE (target) != Pmode)
6686 target = gen_reg_rtx (Pmode);
6687 create_output_operand (&op, target, Pmode);
6688 expand_insn (icode, 1, &op);
6689 return target;
6691 error ("__builtin_thread_pointer is not supported on this target");
6692 return const0_rtx;
6695 static void
6696 expand_builtin_set_thread_pointer (tree exp)
6698 enum insn_code icode;
6699 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6700 return;
6701 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6702 if (icode != CODE_FOR_nothing)
6704 struct expand_operand op;
6705 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6706 Pmode, EXPAND_NORMAL);
6707 create_input_operand (&op, val, Pmode);
6708 expand_insn (icode, 1, &op);
6709 return;
6711 error ("__builtin_set_thread_pointer is not supported on this target");
6715 /* Emit code to restore the current value of the stack. */
6717 static void
6718 expand_stack_restore (tree var)
6720 rtx_insn *prev;
6721 rtx sa = expand_normal (var);
6723 sa = convert_memory_address (Pmode, sa);
6725 prev = get_last_insn ();
6726 emit_stack_restore (SAVE_BLOCK, sa);
6728 record_new_stack_level ();
6730 fixup_args_size_notes (prev, get_last_insn (), 0);
6733 /* Emit code to save the current value of the stack. */
6735 static rtx
6736 expand_stack_save (void)
6738 rtx ret = NULL_RTX;
6740 emit_stack_save (SAVE_BLOCK, &ret);
6741 return ret;
6744 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6746 static rtx
6747 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6749 const char *name;
6750 rtx fallback_retval;
6751 rtx_insn *(*gen_fn) (rtx, rtx);
6752 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6754 case BUILT_IN_GOACC_PARLEVEL_ID:
6755 name = "__builtin_goacc_parlevel_id";
6756 fallback_retval = const0_rtx;
6757 gen_fn = targetm.gen_oacc_dim_pos;
6758 break;
6759 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6760 name = "__builtin_goacc_parlevel_size";
6761 fallback_retval = const1_rtx;
6762 gen_fn = targetm.gen_oacc_dim_size;
6763 break;
6764 default:
6765 gcc_unreachable ();
6768 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6770 error ("%qs only supported in OpenACC code", name);
6771 return const0_rtx;
6774 tree arg = CALL_EXPR_ARG (exp, 0);
6775 if (TREE_CODE (arg) != INTEGER_CST)
6777 error ("non-constant argument 0 to %qs", name);
6778 return const0_rtx;
6781 int dim = TREE_INT_CST_LOW (arg);
6782 switch (dim)
6784 case GOMP_DIM_GANG:
6785 case GOMP_DIM_WORKER:
6786 case GOMP_DIM_VECTOR:
6787 break;
6788 default:
6789 error ("illegal argument 0 to %qs", name);
6790 return const0_rtx;
6793 if (ignore)
6794 return target;
6796 if (target == NULL_RTX)
6797 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6799 if (!targetm.have_oacc_dim_size ())
6801 emit_move_insn (target, fallback_retval);
6802 return target;
6805 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6806 emit_insn (gen_fn (reg, GEN_INT (dim)));
6807 if (reg != target)
6808 emit_move_insn (target, reg);
6810 return target;
6813 /* Expand a string compare operation using a sequence of char comparisons
6814 to get rid of the calling overhead, with result going to TARGET if
6815 that's convenient.
6817 VAR_STR is the variable string source;
6818 CONST_STR is the constant string source;
6819 LENGTH is the number of chars to compare;
6820 CONST_STR_N indicates which source string is the constant string;
6821 MODE is the mode of the result.
6823 The call expands to (assuming const_str_n is 2, i.e., arg2 is the constant string):
6825 target = (int) (unsigned char) var_str[0]
6826 - (int) (unsigned char) const_str[0];
6827 if (target != 0)
6828 goto ne_label;
6829 ...
6830 target = (int) (unsigned char) var_str[length - 2]
6831 - (int) (unsigned char) const_str[length - 2];
6832 if (target != 0)
6833 goto ne_label;
6834 target = (int) (unsigned char) var_str[length - 1]
6835 - (int) (unsigned char) const_str[length - 1];
6836 ne_label:
6839 static rtx
6840 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6841 unsigned HOST_WIDE_INT length,
6842 int const_str_n, machine_mode mode)
6844 HOST_WIDE_INT offset = 0;
6845 rtx var_rtx_array
6846 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6847 rtx var_rtx = NULL_RTX;
6848 rtx const_rtx = NULL_RTX;
6849 rtx result = target ? target : gen_reg_rtx (mode);
6850 rtx_code_label *ne_label = gen_label_rtx ();
6851 tree unit_type_node = unsigned_char_type_node;
6852 scalar_int_mode unit_mode
6853 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6855 start_sequence ();
6857 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6859 var_rtx
6860 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6861 const_rtx = c_readstr (const_str + offset, unit_mode);
6862 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6863 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6865 op0 = convert_modes (mode, unit_mode, op0, 1);
6866 op1 = convert_modes (mode, unit_mode, op1, 1);
6867 result = expand_simple_binop (mode, MINUS, op0, op1,
6868 result, 1, OPTAB_WIDEN);
6869 if (i < length - 1)
6870 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6871 mode, true, ne_label);
6872 offset += GET_MODE_SIZE (unit_mode);
6875 emit_label (ne_label);
6876 rtx_insn *insns = get_insns ();
6877 end_sequence ();
6878 emit_insn (insns);
6880 return result;
6883 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
6884 going to TARGET if that's convenient.
6885 If the call is not inlined, return NULL_RTX. */
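/* For example (illustrative only), a call such as

     memcmp (buf, "abc", 3)

   qualifies because the second argument is a constant string, and is
   expanded by inline_string_cmp above into a short compare-and-branch
   sequence.  */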
6886 static rtx
6887 inline_expand_builtin_string_cmp (tree exp, rtx target)
6889 tree fndecl = get_callee_fndecl (exp);
6890 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6891 unsigned HOST_WIDE_INT length = 0;
6892 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6894 /* Do NOT apply this inlining expansion when optimizing for size or
6895 optimization level below 2. */
6896 if (optimize < 2 || optimize_insn_for_size_p ())
6897 return NULL_RTX;
6899 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6900 || fcode == BUILT_IN_STRNCMP
6901 || fcode == BUILT_IN_MEMCMP);
6903 /* On a target where the type of the call (int) has the same or narrower
6904 precision than unsigned char, give up on the inline expansion. */
6905 if (TYPE_PRECISION (unsigned_char_type_node)
6906 >= TYPE_PRECISION (TREE_TYPE (exp)))
6907 return NULL_RTX;
6909 tree arg1 = CALL_EXPR_ARG (exp, 0);
6910 tree arg2 = CALL_EXPR_ARG (exp, 1);
6911 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6913 unsigned HOST_WIDE_INT len1 = 0;
6914 unsigned HOST_WIDE_INT len2 = 0;
6915 unsigned HOST_WIDE_INT len3 = 0;
6917 const char *src_str1 = c_getstr (arg1, &len1);
6918 const char *src_str2 = c_getstr (arg2, &len2);
6920 /* If neither string is a constant string, the call does not qualify. */
6921 if (!src_str1 && !src_str2)
6922 return NULL_RTX;
6924 /* For strncmp, if the length is not a constant, the call does not qualify. */
6925 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6926 return NULL_RTX;
6928 int const_str_n = 0;
6929 if (!len1)
6930 const_str_n = 2;
6931 else if (!len2)
6932 const_str_n = 1;
6933 else if (len2 > len1)
6934 const_str_n = 1;
6935 else
6936 const_str_n = 2;
6938 gcc_checking_assert (const_str_n > 0);
6939 length = (const_str_n == 1) ? len1 : len2;
6941 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6942 length = len3;
6944 /* If the length of the comparison is larger than the threshold,
6945 do nothing. */
6946 if (length > (unsigned HOST_WIDE_INT)
6947 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6948 return NULL_RTX;
6950 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6952 /* Now, start the inline expansion of the call. */
6953 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6954 (const_str_n == 1) ? src_str1 : src_str2, length,
6955 const_str_n, mode);
6958 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6959 represents the size of the first argument to that call, or VOIDmode
6960 if the argument is a pointer. IGNORE will be true if the result
6961 isn't used. */
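/* For example (illustrative only), code hardened against Spectre-style
   attacks may write

     if (i < bound)
       x = array[__builtin_speculation_safe_value (i)];

   so that on targets providing a speculation barrier, I is forced to
   the failsafe value (0 by default) if the bounds check has been
   bypassed speculatively.  */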
6962 static rtx
6963 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6964 bool ignore)
6966 rtx val, failsafe;
6967 unsigned nargs = call_expr_nargs (exp);
6969 tree arg0 = CALL_EXPR_ARG (exp, 0);
6971 if (mode == VOIDmode)
6973 mode = TYPE_MODE (TREE_TYPE (arg0));
6974 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6977 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6979 /* An optional second argument can be used as a failsafe value on
6980 some machines. If it isn't present, then the failsafe value is
6981 assumed to be 0. */
6982 if (nargs > 1)
6984 tree arg1 = CALL_EXPR_ARG (exp, 1);
6985 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6987 else
6988 failsafe = const0_rtx;
6990 /* If the result isn't used, the behavior is undefined. It would be
6991 nice to emit a warning here, but path splitting means this might
6992 happen with legitimate code. So simply drop the builtin
6993 expansion in that case; we've handled any side-effects above. */
6994 if (ignore)
6995 return const0_rtx;
6997 /* If we don't have a suitable target, create one to hold the result. */
6998 if (target == NULL || GET_MODE (target) != mode)
6999 target = gen_reg_rtx (mode);
7001 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7002 val = convert_modes (mode, VOIDmode, val, false);
7004 return targetm.speculation_safe_value (mode, target, val, failsafe);
7007 /* Expand an expression EXP that calls a built-in function,
7008 with result going to TARGET if that's convenient
7009 (and in mode MODE if that's convenient).
7010 SUBTARGET may be used as the target for computing one of EXP's operands.
7011 IGNORE is nonzero if the value is to be ignored. */
7013 rtx
7014 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7015 int ignore)
7017 tree fndecl = get_callee_fndecl (exp);
7018 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7019 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7020 int flags;
7022 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7023 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7025 /* When ASan is enabled, we don't want to expand some memory/string
7026 builtins and rely on libsanitizer's hooks. This allows us to avoid
7027 redundant checks and be sure that a possible overflow will be detected
7028 by ASan. */
7030 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7031 return expand_call (exp, target, ignore);
7033 /* When not optimizing, generate calls to library functions for a certain
7034 set of builtins. */
7035 if (!optimize
7036 && !called_as_built_in (fndecl)
7037 && fcode != BUILT_IN_FORK
7038 && fcode != BUILT_IN_EXECL
7039 && fcode != BUILT_IN_EXECV
7040 && fcode != BUILT_IN_EXECLP
7041 && fcode != BUILT_IN_EXECLE
7042 && fcode != BUILT_IN_EXECVP
7043 && fcode != BUILT_IN_EXECVE
7044 && !ALLOCA_FUNCTION_CODE_P (fcode)
7045 && fcode != BUILT_IN_FREE)
7046 return expand_call (exp, target, ignore);
7048 /* The built-in function expanders test for target == const0_rtx
7049 to determine whether the function's result will be ignored. */
7050 if (ignore)
7051 target = const0_rtx;
7053 /* If the result of a pure or const built-in function is ignored, and
7054 none of its arguments are volatile, we can avoid expanding the
7055 built-in call and just evaluate the arguments for side-effects. */
7056 if (target == const0_rtx
7057 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7058 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7060 bool volatilep = false;
7061 tree arg;
7062 call_expr_arg_iterator iter;
7064 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7065 if (TREE_THIS_VOLATILE (arg))
7067 volatilep = true;
7068 break;
7071 if (! volatilep)
7073 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7074 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7075 return const0_rtx;
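/* For example (illustration only), a statement such as

     (void) __builtin_strlen (p = q);

   reaches this point with TARGET == const0_rtx: the strlen call itself
   is dropped, but the assignment P = Q buried in the argument is still
   expanded for its side-effect.  */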
7079 switch (fcode)
7081 CASE_FLT_FN (BUILT_IN_FABS):
7082 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7083 case BUILT_IN_FABSD32:
7084 case BUILT_IN_FABSD64:
7085 case BUILT_IN_FABSD128:
7086 target = expand_builtin_fabs (exp, target, subtarget);
7087 if (target)
7088 return target;
7089 break;
7091 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7092 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7093 target = expand_builtin_copysign (exp, target, subtarget);
7094 if (target)
7095 return target;
7096 break;
7098 /* Just do a normal library call if we were unable to fold
7099 the values. */
7100 CASE_FLT_FN (BUILT_IN_CABS):
7101 break;
7103 CASE_FLT_FN (BUILT_IN_FMA):
7104 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7105 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7106 if (target)
7107 return target;
7108 break;
7110 CASE_FLT_FN (BUILT_IN_ILOGB):
7111 if (! flag_unsafe_math_optimizations)
7112 break;
7113 gcc_fallthrough ();
7114 CASE_FLT_FN (BUILT_IN_ISINF):
7115 CASE_FLT_FN (BUILT_IN_FINITE):
7116 case BUILT_IN_ISFINITE:
7117 case BUILT_IN_ISNORMAL:
7118 target = expand_builtin_interclass_mathfn (exp, target);
7119 if (target)
7120 return target;
7121 break;
7123 CASE_FLT_FN (BUILT_IN_ICEIL):
7124 CASE_FLT_FN (BUILT_IN_LCEIL):
7125 CASE_FLT_FN (BUILT_IN_LLCEIL):
7126 CASE_FLT_FN (BUILT_IN_LFLOOR):
7127 CASE_FLT_FN (BUILT_IN_IFLOOR):
7128 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7129 target = expand_builtin_int_roundingfn (exp, target);
7130 if (target)
7131 return target;
7132 break;
7134 CASE_FLT_FN (BUILT_IN_IRINT):
7135 CASE_FLT_FN (BUILT_IN_LRINT):
7136 CASE_FLT_FN (BUILT_IN_LLRINT):
7137 CASE_FLT_FN (BUILT_IN_IROUND):
7138 CASE_FLT_FN (BUILT_IN_LROUND):
7139 CASE_FLT_FN (BUILT_IN_LLROUND):
7140 target = expand_builtin_int_roundingfn_2 (exp, target);
7141 if (target)
7142 return target;
7143 break;
7145 CASE_FLT_FN (BUILT_IN_POWI):
7146 target = expand_builtin_powi (exp, target);
7147 if (target)
7148 return target;
7149 break;
7151 CASE_FLT_FN (BUILT_IN_CEXPI):
7152 target = expand_builtin_cexpi (exp, target);
7153 gcc_assert (target);
7154 return target;
7156 CASE_FLT_FN (BUILT_IN_SIN):
7157 CASE_FLT_FN (BUILT_IN_COS):
7158 if (! flag_unsafe_math_optimizations)
7159 break;
7160 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7161 if (target)
7162 return target;
7163 break;
7165 CASE_FLT_FN (BUILT_IN_SINCOS):
7166 if (! flag_unsafe_math_optimizations)
7167 break;
7168 target = expand_builtin_sincos (exp);
7169 if (target)
7170 return target;
7171 break;
7173 case BUILT_IN_APPLY_ARGS:
7174 return expand_builtin_apply_args ();
7176 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7177 FUNCTION with a copy of the parameters described by
7178 ARGUMENTS, and ARGSIZE. It returns a block of memory
7179 allocated on the stack into which is stored all the registers
7180 that might possibly be used for returning the result of a
7181 function. ARGUMENTS is the value returned by
7182 __builtin_apply_args. ARGSIZE is the number of bytes of
7183 arguments that must be copied. ??? How should this value be
7184 computed? We'll also need a safe worst case value for varargs
7185 functions. */
7186 case BUILT_IN_APPLY:
7187 if (!validate_arglist (exp, POINTER_TYPE,
7188 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7189 && !validate_arglist (exp, REFERENCE_TYPE,
7190 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7191 return const0_rtx;
7192 else
7194 rtx ops[3];
7196 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7197 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7198 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7200 return expand_builtin_apply (ops[0], ops[1], ops[2]);
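/* Illustrative use (not part of GCC; TARGET_FN and the 64-byte bound
   are made up for the example): the three builtins combine into a
   forwarding thunk such as

     void thunk (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   where 64 is the caller-supplied ARGSIZE bound that the ??? note
   above is asking about.  */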
7203 /* __builtin_return (RESULT) causes the function to return the
7204 value described by RESULT. RESULT is address of the block of
7205 memory returned by __builtin_apply. */
7206 case BUILT_IN_RETURN:
7207 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7208 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7209 return const0_rtx;
7211 case BUILT_IN_SAVEREGS:
7212 return expand_builtin_saveregs ();
7214 case BUILT_IN_VA_ARG_PACK:
7215 /* All valid uses of __builtin_va_arg_pack () are removed during
7216 inlining. */
7217 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7218 return const0_rtx;
7220 case BUILT_IN_VA_ARG_PACK_LEN:
7221 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7222 inlining. */
7223 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7224 return const0_rtx;
7226 /* Return the address of the first anonymous stack arg. */
7227 case BUILT_IN_NEXT_ARG:
7228 if (fold_builtin_next_arg (exp, false))
7229 return const0_rtx;
7230 return expand_builtin_next_arg ();
7232 case BUILT_IN_CLEAR_CACHE:
7233 target = expand_builtin___clear_cache (exp);
7234 if (target)
7235 return target;
7236 break;
7238 case BUILT_IN_CLASSIFY_TYPE:
7239 return expand_builtin_classify_type (exp);
7241 case BUILT_IN_CONSTANT_P:
7242 return const0_rtx;
7244 case BUILT_IN_FRAME_ADDRESS:
7245 case BUILT_IN_RETURN_ADDRESS:
7246 return expand_builtin_frame_address (fndecl, exp);
7248 /* Returns the address of the area where the structure is returned.
7249 0 otherwise. */
7250 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7251 if (call_expr_nargs (exp) != 0
7252 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7253 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7254 return const0_rtx;
7255 else
7256 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7258 CASE_BUILT_IN_ALLOCA:
7259 target = expand_builtin_alloca (exp);
7260 if (target)
7261 return target;
7262 break;
7264 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7265 return expand_asan_emit_allocas_unpoison (exp);
7267 case BUILT_IN_STACK_SAVE:
7268 return expand_stack_save ();
7270 case BUILT_IN_STACK_RESTORE:
7271 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7272 return const0_rtx;
7274 case BUILT_IN_BSWAP16:
7275 case BUILT_IN_BSWAP32:
7276 case BUILT_IN_BSWAP64:
7277 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7278 if (target)
7279 return target;
7280 break;
7282 CASE_INT_FN (BUILT_IN_FFS):
7283 target = expand_builtin_unop (target_mode, exp, target,
7284 subtarget, ffs_optab);
7285 if (target)
7286 return target;
7287 break;
7289 CASE_INT_FN (BUILT_IN_CLZ):
7290 target = expand_builtin_unop (target_mode, exp, target,
7291 subtarget, clz_optab);
7292 if (target)
7293 return target;
7294 break;
7296 CASE_INT_FN (BUILT_IN_CTZ):
7297 target = expand_builtin_unop (target_mode, exp, target,
7298 subtarget, ctz_optab);
7299 if (target)
7300 return target;
7301 break;
7303 CASE_INT_FN (BUILT_IN_CLRSB):
7304 target = expand_builtin_unop (target_mode, exp, target,
7305 subtarget, clrsb_optab);
7306 if (target)
7307 return target;
7308 break;
7310 CASE_INT_FN (BUILT_IN_POPCOUNT):
7311 target = expand_builtin_unop (target_mode, exp, target,
7312 subtarget, popcount_optab);
7313 if (target)
7314 return target;
7315 break;
7317 CASE_INT_FN (BUILT_IN_PARITY):
7318 target = expand_builtin_unop (target_mode, exp, target,
7319 subtarget, parity_optab);
7320 if (target)
7321 return target;
7322 break;
7324 case BUILT_IN_STRLEN:
7325 target = expand_builtin_strlen (exp, target, target_mode);
7326 if (target)
7327 return target;
7328 break;
7330 case BUILT_IN_STRNLEN:
7331 target = expand_builtin_strnlen (exp, target, target_mode);
7332 if (target)
7333 return target;
7334 break;
7336 case BUILT_IN_STRCAT:
7337 target = expand_builtin_strcat (exp, target);
7338 if (target)
7339 return target;
7340 break;
7342 case BUILT_IN_STRCPY:
7343 target = expand_builtin_strcpy (exp, target);
7344 if (target)
7345 return target;
7346 break;
7348 case BUILT_IN_STRNCAT:
7349 target = expand_builtin_strncat (exp, target);
7350 if (target)
7351 return target;
7352 break;
7354 case BUILT_IN_STRNCPY:
7355 target = expand_builtin_strncpy (exp, target);
7356 if (target)
7357 return target;
7358 break;
7360 case BUILT_IN_STPCPY:
7361 target = expand_builtin_stpcpy (exp, target, mode);
7362 if (target)
7363 return target;
7364 break;
7366 case BUILT_IN_STPNCPY:
7367 target = expand_builtin_stpncpy (exp, target);
7368 if (target)
7369 return target;
7370 break;
7372 case BUILT_IN_MEMCHR:
7373 target = expand_builtin_memchr (exp, target);
7374 if (target)
7375 return target;
7376 break;
7378 case BUILT_IN_MEMCPY:
7379 target = expand_builtin_memcpy (exp, target);
7380 if (target)
7381 return target;
7382 break;
7384 case BUILT_IN_MEMMOVE:
7385 target = expand_builtin_memmove (exp, target);
7386 if (target)
7387 return target;
7388 break;
7390 case BUILT_IN_MEMPCPY:
7391 target = expand_builtin_mempcpy (exp, target);
7392 if (target)
7393 return target;
7394 break;
7396 case BUILT_IN_MEMSET:
7397 target = expand_builtin_memset (exp, target, mode);
7398 if (target)
7399 return target;
7400 break;
7402 case BUILT_IN_BZERO:
7403 target = expand_builtin_bzero (exp);
7404 if (target)
7405 return target;
7406 break;
7408 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7409 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7410 when changing it to a strcmp call. */
7411 case BUILT_IN_STRCMP_EQ:
7412 target = expand_builtin_memcmp (exp, target, true);
7413 if (target)
7414 return target;
7416 /* Change this call back to a BUILT_IN_STRCMP. */
7417 TREE_OPERAND (exp, 1)
7418 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7420 /* Delete the last parameter. */
7421 unsigned int i;
7422 vec<tree, va_gc> *arg_vec;
7423 vec_alloc (arg_vec, 2);
7424 for (i = 0; i < 2; i++)
7425 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7426 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7427 /* FALLTHROUGH */
7429 case BUILT_IN_STRCMP:
7430 target = expand_builtin_strcmp (exp, target);
7431 if (target)
7432 return target;
7433 break;
7435 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7436 back to a BUILT_IN_STRNCMP. */
7437 case BUILT_IN_STRNCMP_EQ:
7438 target = expand_builtin_memcmp (exp, target, true);
7439 if (target)
7440 return target;
7442 /* Change it back to a BUILT_IN_STRNCMP. */
7443 TREE_OPERAND (exp, 1)
7444 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7445 /* FALLTHROUGH */
7447 case BUILT_IN_STRNCMP:
7448 target = expand_builtin_strncmp (exp, target, mode);
7449 if (target)
7450 return target;
7451 break;
7453 case BUILT_IN_BCMP:
7454 case BUILT_IN_MEMCMP:
7455 case BUILT_IN_MEMCMP_EQ:
7456 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7457 if (target)
7458 return target;
7459 if (fcode == BUILT_IN_MEMCMP_EQ)
7461 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7462 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7464 break;
7466 case BUILT_IN_SETJMP:
7467 /* This should have been lowered to the builtins below. */
7468 gcc_unreachable ();
7470 case BUILT_IN_SETJMP_SETUP:
7471 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7472 and the receiver label. */
7473 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7475 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7476 VOIDmode, EXPAND_NORMAL);
7477 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7478 rtx_insn *label_r = label_rtx (label);
7480 /* This is copied from the handling of non-local gotos. */
7481 expand_builtin_setjmp_setup (buf_addr, label_r);
7482 nonlocal_goto_handler_labels
7483 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7484 nonlocal_goto_handler_labels);
7485 /* ??? Do not let expand_label treat us as such since we would
7486 not want to be both on the list of non-local labels and on
7487 the list of forced labels. */
7488 FORCED_LABEL (label) = 0;
7489 return const0_rtx;
7491 break;
7493 case BUILT_IN_SETJMP_RECEIVER:
7494 /* __builtin_setjmp_receiver is passed the receiver label. */
7495 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7497 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7498 rtx_insn *label_r = label_rtx (label);
7500 expand_builtin_setjmp_receiver (label_r);
7501 return const0_rtx;
7503 break;
7505 /* __builtin_longjmp is passed a pointer to an array of five words.
7506 It's similar to the C library longjmp function but works with
7507 __builtin_setjmp above. */
7508 case BUILT_IN_LONGJMP:
7509 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7511 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7512 VOIDmode, EXPAND_NORMAL);
7513 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7515 if (value != const1_rtx)
7517 error ("%<__builtin_longjmp%> second argument must be 1");
7518 return const0_rtx;
7521 expand_builtin_longjmp (buf_addr, value);
7522 return const0_rtx;
7524 break;
7526 case BUILT_IN_NONLOCAL_GOTO:
7527 target = expand_builtin_nonlocal_goto (exp);
7528 if (target)
7529 return target;
7530 break;
7532 /* This updates the setjmp buffer that is its argument with the value
7533 of the current stack pointer. */
7534 case BUILT_IN_UPDATE_SETJMP_BUF:
7535 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7537 rtx buf_addr
7538 = expand_normal (CALL_EXPR_ARG (exp, 0));
7540 expand_builtin_update_setjmp_buf (buf_addr);
7541 return const0_rtx;
7543 break;
7545 case BUILT_IN_TRAP:
7546 expand_builtin_trap ();
7547 return const0_rtx;
7549 case BUILT_IN_UNREACHABLE:
7550 expand_builtin_unreachable ();
7551 return const0_rtx;
7553 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7554 case BUILT_IN_SIGNBITD32:
7555 case BUILT_IN_SIGNBITD64:
7556 case BUILT_IN_SIGNBITD128:
7557 target = expand_builtin_signbit (exp, target);
7558 if (target)
7559 return target;
7560 break;
7562 /* Various hooks for the DWARF 2 __throw routine. */
7563 case BUILT_IN_UNWIND_INIT:
7564 expand_builtin_unwind_init ();
7565 return const0_rtx;
7566 case BUILT_IN_DWARF_CFA:
7567 return virtual_cfa_rtx;
7568 #ifdef DWARF2_UNWIND_INFO
7569 case BUILT_IN_DWARF_SP_COLUMN:
7570 return expand_builtin_dwarf_sp_column ();
7571 case BUILT_IN_INIT_DWARF_REG_SIZES:
7572 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7573 return const0_rtx;
7574 #endif
7575 case BUILT_IN_FROB_RETURN_ADDR:
7576 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7577 case BUILT_IN_EXTRACT_RETURN_ADDR:
7578 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7579 case BUILT_IN_EH_RETURN:
7580 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7581 CALL_EXPR_ARG (exp, 1));
7582 return const0_rtx;
7583 case BUILT_IN_EH_RETURN_DATA_REGNO:
7584 return expand_builtin_eh_return_data_regno (exp);
7585 case BUILT_IN_EXTEND_POINTER:
7586 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7587 case BUILT_IN_EH_POINTER:
7588 return expand_builtin_eh_pointer (exp);
7589 case BUILT_IN_EH_FILTER:
7590 return expand_builtin_eh_filter (exp);
7591 case BUILT_IN_EH_COPY_VALUES:
7592 return expand_builtin_eh_copy_values (exp);
7594 case BUILT_IN_VA_START:
7595 return expand_builtin_va_start (exp);
7596 case BUILT_IN_VA_END:
7597 return expand_builtin_va_end (exp);
7598 case BUILT_IN_VA_COPY:
7599 return expand_builtin_va_copy (exp);
7600 case BUILT_IN_EXPECT:
7601 return expand_builtin_expect (exp, target);
7602 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7603 return expand_builtin_expect_with_probability (exp, target);
7604 case BUILT_IN_ASSUME_ALIGNED:
7605 return expand_builtin_assume_aligned (exp, target);
7606 case BUILT_IN_PREFETCH:
7607 expand_builtin_prefetch (exp);
7608 return const0_rtx;
7610 case BUILT_IN_INIT_TRAMPOLINE:
7611 return expand_builtin_init_trampoline (exp, true);
7612 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7613 return expand_builtin_init_trampoline (exp, false);
7614 case BUILT_IN_ADJUST_TRAMPOLINE:
7615 return expand_builtin_adjust_trampoline (exp);
7617 case BUILT_IN_INIT_DESCRIPTOR:
7618 return expand_builtin_init_descriptor (exp);
7619 case BUILT_IN_ADJUST_DESCRIPTOR:
7620 return expand_builtin_adjust_descriptor (exp);
7622 case BUILT_IN_FORK:
7623 case BUILT_IN_EXECL:
7624 case BUILT_IN_EXECV:
7625 case BUILT_IN_EXECLP:
7626 case BUILT_IN_EXECLE:
7627 case BUILT_IN_EXECVP:
7628 case BUILT_IN_EXECVE:
7629 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7630 if (target)
7631 return target;
7632 break;
7634 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7635 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7636 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7637 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7638 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7639 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7640 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7641 if (target)
7642 return target;
7643 break;
7645 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7646 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7647 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7648 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7649 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7650 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7651 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7652 if (target)
7653 return target;
7654 break;
7656 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7657 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7658 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7659 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7660 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7661 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7662 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7663 if (target)
7664 return target;
7665 break;
7667 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7668 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7669 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7670 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7671 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7672 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7673 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7674 if (target)
7675 return target;
7676 break;
7678 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7679 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7680 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7681 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7682 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7683 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7684 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7685 if (target)
7686 return target;
7687 break;
7689 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7690 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7691 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7692 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7693 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7694 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7695 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7696 if (target)
7697 return target;
7698 break;
7700 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7701 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7702 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7703 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7704 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7705 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7706 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7707 if (target)
7708 return target;
7709 break;
7711 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7712 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7713 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7714 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7715 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7716 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7717 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7718 if (target)
7719 return target;
7720 break;
7722 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7723 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7724 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7725 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7726 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7727 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7728 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7729 if (target)
7730 return target;
7731 break;
7733 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7734 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7735 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7736 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7737 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7738 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7739 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7740 if (target)
7741 return target;
7742 break;
7744 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7745 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7746 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7747 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7748 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7749 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7750 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7751 if (target)
7752 return target;
7753 break;
7755 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7756 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7757 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7758 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7759 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7760 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7761 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7762 if (target)
7763 return target;
7764 break;
7766 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7767 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7768 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7769 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7770 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7771 if (mode == VOIDmode)
7772 mode = TYPE_MODE (boolean_type_node);
7773 if (!target || !register_operand (target, mode))
7774 target = gen_reg_rtx (mode);
7776 mode = get_builtin_sync_mode
7777 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7778 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7779 if (target)
7780 return target;
7781 break;
7783 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7784 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7785 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7786 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7787 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7788 mode = get_builtin_sync_mode
7789 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7790 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7791 if (target)
7792 return target;
7793 break;
7795 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7796 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7797 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7798 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7799 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7800 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7801 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7802 if (target)
7803 return target;
7804 break;
7806 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7807 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7808 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7809 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7810 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7811 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7812 expand_builtin_sync_lock_release (mode, exp);
7813 return const0_rtx;
7815 case BUILT_IN_SYNC_SYNCHRONIZE:
7816 expand_builtin_sync_synchronize ();
7817 return const0_rtx;
7819 case BUILT_IN_ATOMIC_EXCHANGE_1:
7820 case BUILT_IN_ATOMIC_EXCHANGE_2:
7821 case BUILT_IN_ATOMIC_EXCHANGE_4:
7822 case BUILT_IN_ATOMIC_EXCHANGE_8:
7823 case BUILT_IN_ATOMIC_EXCHANGE_16:
7824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7825 target = expand_builtin_atomic_exchange (mode, exp, target);
7826 if (target)
7827 return target;
7828 break;
7830 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7831 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7832 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7833 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7834 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7836 unsigned int nargs, z;
7837 vec<tree, va_gc> *vec;
7839 mode =
7840 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7841 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7842 if (target)
7843 return target;
7845 /* If this is turned into an external library call, the weak parameter
7846 must be dropped to match the expected parameter list. */
7847 nargs = call_expr_nargs (exp);
7848 vec_alloc (vec, nargs - 1);
7849 for (z = 0; z < 3; z++)
7850 vec->quick_push (CALL_EXPR_ARG (exp, z));
7851 /* Skip the boolean weak parameter. */
7852 for (z = 4; z < 6; z++)
7853 vec->quick_push (CALL_EXPR_ARG (exp, z));
7854 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7855 break;
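/* Illustration (library names as in libatomic; treat the exact
   signature as an assumption): a 4-byte

     __atomic_compare_exchange_n (ptr, &expected, desired, weak,
                                  smodel, fmodel)

   that cannot be expanded inline becomes a call of the shape

     __atomic_compare_exchange_4 (ptr, &expected, desired,
                                  smodel, fmodel);

   the boolean WEAK argument (index 3) is exactly the one the loop
   above skips.  */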
7858 case BUILT_IN_ATOMIC_LOAD_1:
7859 case BUILT_IN_ATOMIC_LOAD_2:
7860 case BUILT_IN_ATOMIC_LOAD_4:
7861 case BUILT_IN_ATOMIC_LOAD_8:
7862 case BUILT_IN_ATOMIC_LOAD_16:
7863 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7864 target = expand_builtin_atomic_load (mode, exp, target);
7865 if (target)
7866 return target;
7867 break;
7869 case BUILT_IN_ATOMIC_STORE_1:
7870 case BUILT_IN_ATOMIC_STORE_2:
7871 case BUILT_IN_ATOMIC_STORE_4:
7872 case BUILT_IN_ATOMIC_STORE_8:
7873 case BUILT_IN_ATOMIC_STORE_16:
7874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7875 target = expand_builtin_atomic_store (mode, exp);
7876 if (target)
7877 return const0_rtx;
7878 break;
7880 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7881 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7882 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7883 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7884 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7886 enum built_in_function lib;
7887 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7888 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7889 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7890 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7891 ignore, lib);
7892 if (target)
7893 return target;
7894 break;
7896 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7897 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7898 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7899 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7900 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7902 enum built_in_function lib;
7903 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7904 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7905 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7906 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7907 ignore, lib);
7908 if (target)
7909 return target;
7910 break;
7912 case BUILT_IN_ATOMIC_AND_FETCH_1:
7913 case BUILT_IN_ATOMIC_AND_FETCH_2:
7914 case BUILT_IN_ATOMIC_AND_FETCH_4:
7915 case BUILT_IN_ATOMIC_AND_FETCH_8:
7916 case BUILT_IN_ATOMIC_AND_FETCH_16:
7918 enum built_in_function lib;
7919 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7920 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7921 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7922 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7923 ignore, lib);
7924 if (target)
7925 return target;
7926 break;
7928 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7929 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7930 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7931 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7932 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7934 enum built_in_function lib;
7935 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7936 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7937 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7938 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7939 ignore, lib);
7940 if (target)
7941 return target;
7942 break;
7944 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7945 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7946 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7947 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7948 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7950 enum built_in_function lib;
7951 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7952 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7953 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7954 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7955 ignore, lib);
7956 if (target)
7957 return target;
7958 break;
7960 case BUILT_IN_ATOMIC_OR_FETCH_1:
7961 case BUILT_IN_ATOMIC_OR_FETCH_2:
7962 case BUILT_IN_ATOMIC_OR_FETCH_4:
7963 case BUILT_IN_ATOMIC_OR_FETCH_8:
7964 case BUILT_IN_ATOMIC_OR_FETCH_16:
7966 enum built_in_function lib;
7967 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7968 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7969 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7970 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7971 ignore, lib);
7972 if (target)
7973 return target;
7974 break;
7976 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7977 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7978 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7979 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7980 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7981 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7982 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7983 ignore, BUILT_IN_NONE);
7984 if (target)
7985 return target;
7986 break;
7988 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7989 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7990 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7991 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7992 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7993 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7994 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7995 ignore, BUILT_IN_NONE);
7996 if (target)
7997 return target;
7998 break;
8000 case BUILT_IN_ATOMIC_FETCH_AND_1:
8001 case BUILT_IN_ATOMIC_FETCH_AND_2:
8002 case BUILT_IN_ATOMIC_FETCH_AND_4:
8003 case BUILT_IN_ATOMIC_FETCH_AND_8:
8004 case BUILT_IN_ATOMIC_FETCH_AND_16:
8005 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8006 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8007 ignore, BUILT_IN_NONE);
8008 if (target)
8009 return target;
8010 break;
8012 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8013 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8014 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8015 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8016 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8017 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8018 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8019 ignore, BUILT_IN_NONE);
8020 if (target)
8021 return target;
8022 break;
8024 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8025 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8026 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8027 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8028 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8029 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8030 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8031 ignore, BUILT_IN_NONE);
8032 if (target)
8033 return target;
8034 break;
8036 case BUILT_IN_ATOMIC_FETCH_OR_1:
8037 case BUILT_IN_ATOMIC_FETCH_OR_2:
8038 case BUILT_IN_ATOMIC_FETCH_OR_4:
8039 case BUILT_IN_ATOMIC_FETCH_OR_8:
8040 case BUILT_IN_ATOMIC_FETCH_OR_16:
8041 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8042 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8043 ignore, BUILT_IN_NONE);
8044 if (target)
8045 return target;
8046 break;
8048 case BUILT_IN_ATOMIC_TEST_AND_SET:
8049 return expand_builtin_atomic_test_and_set (exp, target);
8051 case BUILT_IN_ATOMIC_CLEAR:
8052 return expand_builtin_atomic_clear (exp);
8054 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8055 return expand_builtin_atomic_always_lock_free (exp);
8057 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8058 target = expand_builtin_atomic_is_lock_free (exp);
8059 if (target)
8060 return target;
8061 break;
8063 case BUILT_IN_ATOMIC_THREAD_FENCE:
8064 expand_builtin_atomic_thread_fence (exp);
8065 return const0_rtx;
8067 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8068 expand_builtin_atomic_signal_fence (exp);
8069 return const0_rtx;
8071 case BUILT_IN_OBJECT_SIZE:
8072 return expand_builtin_object_size (exp);
8074 case BUILT_IN_MEMCPY_CHK:
8075 case BUILT_IN_MEMPCPY_CHK:
8076 case BUILT_IN_MEMMOVE_CHK:
8077 case BUILT_IN_MEMSET_CHK:
8078 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8079 if (target)
8080 return target;
8081 break;
8083 case BUILT_IN_STRCPY_CHK:
8084 case BUILT_IN_STPCPY_CHK:
8085 case BUILT_IN_STRNCPY_CHK:
8086 case BUILT_IN_STPNCPY_CHK:
8087 case BUILT_IN_STRCAT_CHK:
8088 case BUILT_IN_STRNCAT_CHK:
8089 case BUILT_IN_SNPRINTF_CHK:
8090 case BUILT_IN_VSNPRINTF_CHK:
8091 maybe_emit_chk_warning (exp, fcode);
8092 break;
8094 case BUILT_IN_SPRINTF_CHK:
8095 case BUILT_IN_VSPRINTF_CHK:
8096 maybe_emit_sprintf_chk_warning (exp, fcode);
8097 break;
8099 case BUILT_IN_FREE:
8100 if (warn_free_nonheap_object)
8101 maybe_emit_free_warning (exp);
8102 break;
8104 case BUILT_IN_THREAD_POINTER:
8105 return expand_builtin_thread_pointer (exp, target);
8107 case BUILT_IN_SET_THREAD_POINTER:
8108 expand_builtin_set_thread_pointer (exp);
8109 return const0_rtx;
8111 case BUILT_IN_ACC_ON_DEVICE:
8112 /* Do a library call if we failed to expand the builtin when
8113 folding. */
8114 break;
8116 case BUILT_IN_GOACC_PARLEVEL_ID:
8117 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8118 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8120 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8121 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8123 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8124 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8125 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8126 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8127 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8128 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8129 return expand_speculation_safe_value (mode, exp, target, ignore);
8131 default: /* Just do a library call for an unknown builtin. */
8132 break;
8135 /* The switch statement above can drop through to cause the function
8136 to be called normally. */
8137 return expand_call (exp, target, ignore);
8140 /* Determine whether a tree node represents a call to a built-in
8141 function. If the tree T is a call to a built-in function with
8142 the right number of arguments of the appropriate types, return
8143 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8144 Otherwise the return value is END_BUILTINS. */
8146 enum built_in_function
8147 builtin_mathfn_code (const_tree t)
8149 const_tree fndecl, arg, parmlist;
8150 const_tree argtype, parmtype;
8151 const_call_expr_arg_iterator iter;
8153 if (TREE_CODE (t) != CALL_EXPR)
8154 return END_BUILTINS;
8156 fndecl = get_callee_fndecl (t);
8157 if (fndecl == NULL_TREE
8158 || TREE_CODE (fndecl) != FUNCTION_DECL
8159 || ! DECL_BUILT_IN (fndecl)
8160 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8161 return END_BUILTINS;
8163 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8164 init_const_call_expr_arg_iterator (t, &iter);
8165 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8167 /* If a function doesn't take a variable number of arguments,
8168 the last element in the list will have type `void'. */
8169 parmtype = TREE_VALUE (parmlist);
8170 if (VOID_TYPE_P (parmtype))
8172 if (more_const_call_expr_args_p (&iter))
8173 return END_BUILTINS;
8174 return DECL_FUNCTION_CODE (fndecl);
8177 if (! more_const_call_expr_args_p (&iter))
8178 return END_BUILTINS;
8180 arg = next_const_call_expr_arg (&iter);
8181 argtype = TREE_TYPE (arg);
8183 if (SCALAR_FLOAT_TYPE_P (parmtype))
8185 if (! SCALAR_FLOAT_TYPE_P (argtype))
8186 return END_BUILTINS;
8188 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8190 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8191 return END_BUILTINS;
8193 else if (POINTER_TYPE_P (parmtype))
8195 if (! POINTER_TYPE_P (argtype))
8196 return END_BUILTINS;
8198 else if (INTEGRAL_TYPE_P (parmtype))
8200 if (! INTEGRAL_TYPE_P (argtype))
8201 return END_BUILTINS;
8203 else
8204 return END_BUILTINS;
8207 /* Variable-length argument list. */
8208 return DECL_FUNCTION_CODE (fndecl);
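/* For instance (illustration only), for the call sqrt (2.0) this
   returns BUILT_IN_SQRT, while a call whose arguments don't match
   the prototype -- say sqrt (some_ptr) -- yields END_BUILTINS.  */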
8211 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8212 evaluate to a constant. */
8214 static tree
8215 fold_builtin_constant_p (tree arg)
8217 /* We return 1 for a numeric type that's known to be a constant
8218 value at compile-time or for an aggregate type that's a
8219 literal constant. */
8220 STRIP_NOPS (arg);
8222 /* If we know this is a constant, return the constant one. */
8223 if (CONSTANT_CLASS_P (arg)
8224 || (TREE_CODE (arg) == CONSTRUCTOR
8225 && TREE_CONSTANT (arg)))
8226 return integer_one_node;
8227 if (TREE_CODE (arg) == ADDR_EXPR)
8229 tree op = TREE_OPERAND (arg, 0);
8230 if (TREE_CODE (op) == STRING_CST
8231 || (TREE_CODE (op) == ARRAY_REF
8232 && integer_zerop (TREE_OPERAND (op, 1))
8233 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8234 return integer_one_node;
8237 /* If this expression has side effects, show we don't know it to be a
8238 constant. Likewise if it's a pointer or aggregate type, since in
8239 those cases we only want literals, as those are only optimized
8240 when generating RTL, not later.
8241 And finally, if we are compiling an initializer, not code, we
8242 need to return a definite result now; there's not going to be any
8243 more optimization done. */
8244 if (TREE_SIDE_EFFECTS (arg)
8245 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8246 || POINTER_TYPE_P (TREE_TYPE (arg))
8247 || cfun == 0
8248 || folding_initializer
8249 || force_folding_builtin_constant_p)
8250 return integer_zero_node;
8252 return NULL_TREE;
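/* Illustrative outcomes (not exhaustive):

     __builtin_constant_p (3)      folds to 1 here (CONSTANT_CLASS_P);
     __builtin_constant_p ("abc")  folds to 1 (ADDR_EXPR of a STRING_CST);
     __builtin_constant_p (x)      folds to 0 only when no later pass
                                   could still prove X constant (side
                                   effects, pointer or aggregate type,
                                   initializer context, ...); otherwise
                                   NULL_TREE defers the decision.  */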
8255 /* Create builtin_expect or builtin_expect_with_probability
8256 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8257 The Fortran FE can also produce builtin_expect with PREDICTOR as a third
8258 argument; builtin_expect_with_probability instead uses the third argument
8259 as a PROBABILITY value. */
8261 static tree
8262 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8263 tree predictor, tree probability)
8265 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8267 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8268 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8269 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8270 ret_type = TREE_TYPE (TREE_TYPE (fn));
8271 pred_type = TREE_VALUE (arg_types);
8272 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8274 pred = fold_convert_loc (loc, pred_type, pred);
8275 expected = fold_convert_loc (loc, expected_type, expected);
8277 if (probability)
8278 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8279 else
8280 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8281 predictor);
8283 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8284 build_int_cst (ret_type, 0));
8287 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8288 NULL_TREE if no simplification is possible. */
8290 tree
8291 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8292 tree arg3)
8294 tree inner, fndecl, inner_arg0;
8295 enum tree_code code;
8297 /* Distribute the expected value over short-circuiting operators.
8298 See through the cast from truthvalue_type_node to long. */
8299 inner_arg0 = arg0;
8300 while (CONVERT_EXPR_P (inner_arg0)
8301 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8302 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8303 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8305 /* If this is a builtin_expect within a builtin_expect keep the
8306 inner one. See through a comparison against a constant. It
8307 might have been added to create a truthvalue. */
8308 inner = inner_arg0;
8310 if (COMPARISON_CLASS_P (inner)
8311 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8312 inner = TREE_OPERAND (inner, 0);
8314 if (TREE_CODE (inner) == CALL_EXPR
8315 && (fndecl = get_callee_fndecl (inner))
8316 && (DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL, BUILT_IN_EXPECT)
8317 || DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL,
8318 BUILT_IN_EXPECT_WITH_PROBABILITY)))
8319 return arg0;
8321 inner = inner_arg0;
8322 code = TREE_CODE (inner);
8323 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8325 tree op0 = TREE_OPERAND (inner, 0);
8326 tree op1 = TREE_OPERAND (inner, 1);
8327 arg1 = save_expr (arg1);
8329 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8330 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8331 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8333 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
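/* E.g. (illustration) __builtin_expect (a && b, 1) is rewritten as
   __builtin_expect (a, 1) && __builtin_expect (b, 1), so each
   short-circuited condition carries the hint separately.  */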
8336 /* If the argument isn't invariant then there's nothing else we can do. */
8337 if (!TREE_CONSTANT (inner_arg0))
8338 return NULL_TREE;
8340 /* If we expect that a comparison against the argument will fold to
8341 a constant return the constant. In practice, this means a true
8342 constant or the address of a non-weak symbol. */
8343 inner = inner_arg0;
8344 STRIP_NOPS (inner);
8345 if (TREE_CODE (inner) == ADDR_EXPR)
8349 inner = TREE_OPERAND (inner, 0);
8351 while (TREE_CODE (inner) == COMPONENT_REF
8352 || TREE_CODE (inner) == ARRAY_REF);
8353 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8354 return NULL_TREE;
8357 /* Otherwise, ARG0 already has the proper type for the return value. */
8358 return arg0;
8361 /* Fold a call to __builtin_classify_type with argument ARG. */
8363 static tree
8364 fold_builtin_classify_type (tree arg)
8366 if (arg == 0)
8367 return build_int_cst (integer_type_node, no_type_class);
8369 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8372 /* Fold a call to __builtin_strlen with argument ARG. */
8374 static tree
8375 fold_builtin_strlen (location_t loc, tree type, tree arg)
8377 if (!validate_arg (arg, POINTER_TYPE))
8378 return NULL_TREE;
8379 else
8381 tree len = c_strlen (arg, 0);
8383 if (len)
8384 return fold_convert_loc (loc, type, len);
8386 return NULL_TREE;
8390 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8392 static tree
8393 fold_builtin_inf (location_t loc, tree type, int warn)
8395 REAL_VALUE_TYPE real;
8397 /* __builtin_inff is intended to be usable to define INFINITY on all
8398 targets. If an infinity is not available, INFINITY expands "to a
8399 positive constant of type float that overflows at translation
8400 time", footnote "In this case, using INFINITY will violate the
8401 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8402 Thus we pedwarn to ensure this constraint violation is
8403 diagnosed. */
8404 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8405 pedwarn (loc, 0, "target format does not support infinity");
8407 real_inf (&real);
8408 return build_real (type, real);
8411 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8412 NULL_TREE if no simplification can be made. */
8414 static tree
8415 fold_builtin_sincos (location_t loc,
8416 tree arg0, tree arg1, tree arg2)
8418 tree type;
8419 tree fndecl, call = NULL_TREE;
8421 if (!validate_arg (arg0, REAL_TYPE)
8422 || !validate_arg (arg1, POINTER_TYPE)
8423 || !validate_arg (arg2, POINTER_TYPE))
8424 return NULL_TREE;
8426 type = TREE_TYPE (arg0);
8428 /* Sincos is canonicalized to cexpi below; find the cexpi variant for TYPE. */
8429 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8430 if (fn == END_BUILTINS)
8431 return NULL_TREE;
8433 /* Calculate the result when the argument is a constant. */
8434 if (TREE_CODE (arg0) == REAL_CST)
8436 tree complex_type = build_complex_type (type);
8437 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8439 if (!call)
8441 if (!targetm.libc_has_function (function_c99_math_complex)
8442 || !builtin_decl_implicit_p (fn))
8443 return NULL_TREE;
8444 fndecl = builtin_decl_explicit (fn);
8445 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8446 call = builtin_save_expr (call);
8449 tree ptype = build_pointer_type (type);
8450 arg1 = fold_convert (ptype, arg1);
8451 arg2 = fold_convert (ptype, arg2);
8452 return build2 (COMPOUND_EXPR, void_type_node,
8453 build2 (MODIFY_EXPR, void_type_node,
8454 build_fold_indirect_ref_loc (loc, arg1),
8455 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8456 build2 (MODIFY_EXPR, void_type_node,
8457 build_fold_indirect_ref_loc (loc, arg2),
8458 fold_build1_loc (loc, REALPART_EXPR, type, call)));
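/* Illustrative result (assuming a cexpi variant is available):
   sincos (x, &s, &c) is rewritten as roughly

     tmp = cexpi (x); s = __imag tmp; c = __real tmp;

   matching the IMAGPART/REALPART stores built above.  */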
8461 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8462 Return NULL_TREE if no simplification can be made. */
8464 static tree
8465 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8467 if (!validate_arg (arg1, POINTER_TYPE)
8468 || !validate_arg (arg2, POINTER_TYPE)
8469 || !validate_arg (len, INTEGER_TYPE))
8470 return NULL_TREE;
8472 /* If the LEN parameter is zero, return zero. */
8473 if (integer_zerop (len))
8474 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8475 arg1, arg2);
8477 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8478 if (operand_equal_p (arg1, arg2, 0))
8479 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8481 /* If the len parameter is one, return an expression corresponding to
8482 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8483 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8485 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8486 tree cst_uchar_ptr_node
8487 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8489 tree ind1
8490 = fold_convert_loc (loc, integer_type_node,
8491 build1 (INDIRECT_REF, cst_uchar_node,
8492 fold_convert_loc (loc,
8493 cst_uchar_ptr_node,
8494 arg1)));
8495 tree ind2
8496 = fold_convert_loc (loc, integer_type_node,
8497 build1 (INDIRECT_REF, cst_uchar_node,
8498 fold_convert_loc (loc,
8499 cst_uchar_ptr_node,
8500 arg2)));
8501 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8504 return NULL_TREE;
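/* So (illustration) memcmp (p, q, 1) folds to

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   and memcmp (p, p, n) folds to 0 while still evaluating N for its
   side-effects.  */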
8507 /* Fold a call to builtin isascii with argument ARG. */
8509 static tree
8510 fold_builtin_isascii (location_t loc, tree arg)
8512 if (!validate_arg (arg, INTEGER_TYPE))
8513 return NULL_TREE;
8514 else
8516 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8517 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8518 build_int_cst (integer_type_node,
8519 ~ (unsigned HOST_WIDE_INT) 0x7f));
8520 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8521 arg, integer_zero_node);
8525 /* Fold a call to builtin toascii with argument ARG. */
8527 static tree
8528 fold_builtin_toascii (location_t loc, tree arg)
8530 if (!validate_arg (arg, INTEGER_TYPE))
8531 return NULL_TREE;
8533 /* Transform toascii(c) -> (c & 0x7f). */
8534 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8535 build_int_cst (integer_type_node, 0x7f));
8538 /* Fold a call to builtin isdigit with argument ARG. */
8540 static tree
8541 fold_builtin_isdigit (location_t loc, tree arg)
8543 if (!validate_arg (arg, INTEGER_TYPE))
8544 return NULL_TREE;
8545 else
8547 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8548 /* According to the C standard, isdigit is unaffected by locale.
8549 However, it definitely is affected by the target character set. */
8550 unsigned HOST_WIDE_INT target_digit0
8551 = lang_hooks.to_target_charset ('0');
8553 if (target_digit0 == 0)
8554 return NULL_TREE;
8556 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8557 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8558 build_int_cst (unsigned_type_node, target_digit0));
8559 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8560 build_int_cst (unsigned_type_node, 9));
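/* The single comparison suffices because (unsigned) (c - '0') wraps
   to a huge value whenever C < '0', so both range checks collapse
   into one test; e.g. isdigit ('z') becomes 74u <= 9, i.e. 0.  */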
8564 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8566 static tree
8567 fold_builtin_fabs (location_t loc, tree arg, tree type)
8569 if (!validate_arg (arg, REAL_TYPE))
8570 return NULL_TREE;
8572 arg = fold_convert_loc (loc, type, arg);
8573 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8576 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8578 static tree
8579 fold_builtin_abs (location_t loc, tree arg, tree type)
8581 if (!validate_arg (arg, INTEGER_TYPE))
8582 return NULL_TREE;
8584 arg = fold_convert_loc (loc, type, arg);
8585 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8588 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8590 static tree
8591 fold_builtin_carg (location_t loc, tree arg, tree type)
8593 if (validate_arg (arg, COMPLEX_TYPE)
8594 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8596 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8598 if (atan2_fn)
8600 tree new_arg = builtin_save_expr (arg);
8601 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8602 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8603 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8607 return NULL_TREE;
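/* E.g. (illustration) carg (z) for a double complex Z folds to
   atan2 (__imag z, __real z), with Z wrapped in a SAVE_EXPR so it is
   only evaluated once.  */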
8610 /* Fold a call to builtin frexp, we can assume the base is 2. */
8612 static tree
8613 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8615 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8616 return NULL_TREE;
8618 STRIP_NOPS (arg0);
8620 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8621 return NULL_TREE;
8623 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8625 /* Proceed if a valid pointer type was passed in. */
8626 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8628 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8629 tree frac, exp;
8631 switch (value->cl)
8633 case rvc_zero:
8634 /* For +-0, return (*exp = 0, +-0). */
8635 exp = integer_zero_node;
8636 frac = arg0;
8637 break;
8638 case rvc_nan:
8639 case rvc_inf:
8640 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8641 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8642 case rvc_normal:
8644 /* Since the frexp function always expects base 2, and in
8645 GCC normalized significands are already in the range
8646 [0.5, 1.0), we have exactly what frexp wants. */
8647 REAL_VALUE_TYPE frac_rvt = *value;
8648 SET_REAL_EXP (&frac_rvt, 0);
8649 frac = build_real (rettype, frac_rvt);
8650 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8652 break;
8653 default:
8654 gcc_unreachable ();
8657 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8658 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8659 TREE_SIDE_EFFECTS (arg1) = 1;
8660 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8663 return NULL_TREE;
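/* Worked example (illustration): for frexp (8.0, &e), 8.0 is
   0.5 * 2^4, so the folded result is the pair (e = 4, 0.5).  */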
8666 /* Fold a call to builtin modf. */
8668 static tree
8669 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8671 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8672 return NULL_TREE;
8674 STRIP_NOPS (arg0);
8676 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8677 return NULL_TREE;
8679 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8681 /* Proceed if a valid pointer type was passed in. */
8682 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8684 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8685 REAL_VALUE_TYPE trunc, frac;
8687 switch (value->cl)
8689 case rvc_nan:
8690 case rvc_zero:
8691 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8692 trunc = frac = *value;
8693 break;
8694 case rvc_inf:
8695 /* For +-Inf, return (*arg1 = arg0, +-0). */
8696 frac = dconst0;
8697 frac.sign = value->sign;
8698 trunc = *value;
8699 break;
8700 case rvc_normal:
8701 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8702 real_trunc (&trunc, VOIDmode, value);
8703 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8704 /* If the original number was negative and already
8705 integral, then the fractional part is -0.0. */
8706 if (value->sign && frac.cl == rvc_zero)
8707 frac.sign = value->sign;
8708 break;
8711 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8712 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8713 build_real (rettype, trunc));
8714 TREE_SIDE_EFFECTS (arg1) = 1;
8715 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8716 build_real (rettype, frac));
8719 return NULL_TREE;
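/* Worked example (illustration): modf (-2.5, &i) folds to
   (i = -2.0, -0.5), and modf (-2.0, &i) to (i = -2.0, -0.0), the
   rvc_normal case above forcing the sign of the zero fraction.  */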
8722 /* Given a location LOC, an interclass builtin function decl FNDECL
8723 and its single argument ARG, return a folded expression computing
8724 the same, or NULL_TREE if we either couldn't or didn't want to fold
8725 (the latter happens if there's an RTL instruction available). */
8727 static tree
8728 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8730 machine_mode mode;
8732 if (!validate_arg (arg, REAL_TYPE))
8733 return NULL_TREE;
8735 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8736 return NULL_TREE;
8738 mode = TYPE_MODE (TREE_TYPE (arg));
8740 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8742 /* If there is no optab, try generic code. */
8743 switch (DECL_FUNCTION_CODE (fndecl))
8745 tree result;
8747 CASE_FLT_FN (BUILT_IN_ISINF):
8749 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8750 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8751 tree type = TREE_TYPE (arg);
8752 REAL_VALUE_TYPE r;
8753 char buf[128];
8755 if (is_ibm_extended)
8757 /* NaN and Inf are encoded in the high-order double value
8758 only. The low-order value is not significant. */
8759 type = double_type_node;
8760 mode = DFmode;
8761 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8763 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8764 real_from_string (&r, buf);
8765 result = build_call_expr (isgr_fn, 2,
8766 fold_build1_loc (loc, ABS_EXPR, type, arg),
8767 build_real (type, r));
8768 return result;
8770 CASE_FLT_FN (BUILT_IN_FINITE):
8771 case BUILT_IN_ISFINITE:
8773 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8774 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8775 tree type = TREE_TYPE (arg);
8776 REAL_VALUE_TYPE r;
8777 char buf[128];
8779 if (is_ibm_extended)
8781 /* NaN and Inf are encoded in the high-order double value
8782 only. The low-order value is not significant. */
8783 type = double_type_node;
8784 mode = DFmode;
8785 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8787 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8788 real_from_string (&r, buf);
8789 result = build_call_expr (isle_fn, 2,
8790 fold_build1_loc (loc, ABS_EXPR, type, arg),
8791 build_real (type, r));
8792 /*result = fold_build2_loc (loc, UNGT_EXPR,
8793 TREE_TYPE (TREE_TYPE (fndecl)),
8794 fold_build1_loc (loc, ABS_EXPR, type, arg),
8795 build_real (type, r));
8796 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8797 TREE_TYPE (TREE_TYPE (fndecl)),
8798 result);*/
8799 return result;
8801 case BUILT_IN_ISNORMAL:
8803 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8804 islessequal(fabs(x),DBL_MAX). */
8805 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8806 tree type = TREE_TYPE (arg);
8807 tree orig_arg, max_exp, min_exp;
8808 machine_mode orig_mode = mode;
8809 REAL_VALUE_TYPE rmax, rmin;
8810 char buf[128];
8812 orig_arg = arg = builtin_save_expr (arg);
8813 if (is_ibm_extended)
8815 /* Use double to test the normal range of IBM extended
8816 precision. Emin for IBM extended precision is
8817 different from emin for IEEE double, being 53 higher
8818 since the low double exponent is at least 53 lower
8819 than the high double exponent. */
8820 type = double_type_node;
8821 mode = DFmode;
8822 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8824 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8826 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8827 real_from_string (&rmax, buf);
8828 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8829 real_from_string (&rmin, buf);
8830 max_exp = build_real (type, rmax);
8831 min_exp = build_real (type, rmin);
8833 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8834 if (is_ibm_extended)
8836 /* Testing the high end of the range is done just using
8837 the high double, using the same test as isfinite().
8838 For the subnormal end of the range we first test the
8839 high double, then if its magnitude is equal to the
8840 limit of 0x1p-969, we test whether the low double is
8841 non-zero and opposite sign to the high double. */
8842 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8843 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8844 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8845 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8846 arg, min_exp);
8847 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8848 complex_double_type_node, orig_arg);
8849 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8850 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8851 tree zero = build_real (type, dconst0);
8852 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8853 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8854 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8855 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8856 fold_build3 (COND_EXPR,
8857 integer_type_node,
8858 hilt, logt, lolt));
8859 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8860 eq_min, ok_lo);
8861 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8862 gt_min, eq_min);
8864 else
8866 tree const isge_fn
8867 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8868 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8870 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8871 max_exp, min_exp);
8872 return result;
8874 default:
8875 break;
8878 return NULL_TREE;
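/* Illustrative sketch (not part of GCC): at the source level the folds
   above amount to the following equivalences, written here with the
   <math.h> comparison macros.  DBL_MAX and DBL_MIN stand in for the
   per-mode extremes that the code computes via get_max_float and the
   "0x1p%d" string.  Guarded out, since this file is not the place to
   compile it.  */
#if 0
#include <math.h>
#include <float.h>

static int
isinf_folded (double x)	    /* isinf (x) -> isgreater (fabs (x), MAX) */
{
  return isgreater (fabs (x), DBL_MAX);
}

static int
isfinite_folded (double x)  /* isfinite (x) -> islessequal (fabs (x), MAX) */
{
  return islessequal (fabs (x), DBL_MAX);
}

static int
isnormal_folded (double x)  /* isnormal (x) -> magnitude in [MIN, MAX] */
{
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}
#endif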
8881 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
8882 ARG is the argument for the call; BUILTIN_INDEX selects which check to fold. */
8884 static tree
8885 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8887 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8889 if (!validate_arg (arg, REAL_TYPE))
8890 return NULL_TREE;
8892 switch (builtin_index)
8894 case BUILT_IN_ISINF:
8895 if (!HONOR_INFINITIES (arg))
8896 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8898 return NULL_TREE;
8900 case BUILT_IN_ISINF_SIGN:
8902 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8903 /* In a boolean context, GCC will fold the inner COND_EXPR to
8904 1. So e.g. "if (isinf_sign(x))" would be folded to just
8905 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8906 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8907 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8908 tree tmp = NULL_TREE;
8910 arg = builtin_save_expr (arg);
8912 if (signbit_fn && isinf_fn)
8914 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8915 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8917 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8918 signbit_call, integer_zero_node);
8919 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8920 isinf_call, integer_zero_node);
8922 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8923 integer_minus_one_node, integer_one_node);
8924 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8925 isinf_call, tmp,
8926 integer_zero_node);
8929 return tmp;
8932 case BUILT_IN_ISFINITE:
8933 if (!HONOR_NANS (arg)
8934 && !HONOR_INFINITIES (arg))
8935 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8937 return NULL_TREE;
8939 case BUILT_IN_ISNAN:
8940 if (!HONOR_NANS (arg))
8941 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8944 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8945 if (is_ibm_extended)
8947 /* NaN and Inf are encoded in the high-order double value
8948 only. The low-order value is not significant. */
8949 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8952 arg = builtin_save_expr (arg);
8953 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8955 default:
8956 gcc_unreachable ();
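/* Illustrative sketch (not part of GCC): the isinf_sign fold above is
   the tree form of this source-level definition; in a boolean context
   the inner conditional collapses as described in the comment.  */
#if 0
#include <math.h>

static int
isinf_sign_folded (double x)
{
  /* isinf_sign (x) -> isinf (x) ? (signbit (x) ? -1 : 1) : 0  */
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif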
8960 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8961 This builtin will generate code to return the appropriate floating
8962 point classification depending on the value of the floating point
8963 number passed in. The possible return values must be supplied as
8964 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8965 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8966 one floating point argument which is "type generic". */
8968 static tree
8969 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8971 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8972 arg, type, res, tmp;
8973 machine_mode mode;
8974 REAL_VALUE_TYPE r;
8975 char buf[128];
8977 /* Verify the required arguments in the original call. */
8978 if (nargs != 6
8979 || !validate_arg (args[0], INTEGER_TYPE)
8980 || !validate_arg (args[1], INTEGER_TYPE)
8981 || !validate_arg (args[2], INTEGER_TYPE)
8982 || !validate_arg (args[3], INTEGER_TYPE)
8983 || !validate_arg (args[4], INTEGER_TYPE)
8984 || !validate_arg (args[5], REAL_TYPE))
8985 return NULL_TREE;
8987 fp_nan = args[0];
8988 fp_infinite = args[1];
8989 fp_normal = args[2];
8990 fp_subnormal = args[3];
8991 fp_zero = args[4];
8992 arg = args[5];
8993 type = TREE_TYPE (arg);
8994 mode = TYPE_MODE (type);
8995 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8997 /* fpclassify(x) ->
8998 isnan(x) ? FP_NAN :
8999 (fabs(x) == Inf ? FP_INFINITE :
9000 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9001 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9003 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9004 build_real (type, dconst0));
9005 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9006 tmp, fp_zero, fp_subnormal);
9008 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9009 real_from_string (&r, buf);
9010 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9011 arg, build_real (type, r));
9012 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9014 if (HONOR_INFINITIES (mode))
9016 real_inf (&r);
9017 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9018 build_real (type, r));
9019 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9020 fp_infinite, res);
9023 if (HONOR_NANS (mode))
9025 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9026 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9029 return res;
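/* Illustrative sketch (not part of GCC): the chain of COND_EXPRs built
   above corresponds to this nested conditional, with DBL_MIN standing
   in for the "0x1p%d" smallest-normal constant of the argument's mode
   and the function name made up.  */
#if 0
#include <math.h>
#include <float.h>

static int
fpclassify_folded (double x, int fp_nan, int fp_infinite, int fp_normal,
		   int fp_subnormal, int fp_zero)
{
  double a = fabs (x);
  return isnan (x) ? fp_nan
	 : a == INFINITY ? fp_infinite
	 : a >= DBL_MIN ? fp_normal
	 : a == 0 ? fp_zero : fp_subnormal;
}
#endif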
9032 /* Fold a call to an unordered comparison function such as
9033 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9034 being called and ARG0 and ARG1 are the arguments for the call.
9035 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9036 the opposite of the desired result. UNORDERED_CODE is used
9037 for modes that can hold NaNs and ORDERED_CODE is used for
9038 the rest. */
9040 static tree
9041 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9042 enum tree_code unordered_code,
9043 enum tree_code ordered_code)
9045 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9046 enum tree_code code;
9047 tree type0, type1;
9048 enum tree_code code0, code1;
9049 tree cmp_type = NULL_TREE;
9051 type0 = TREE_TYPE (arg0);
9052 type1 = TREE_TYPE (arg1);
9054 code0 = TREE_CODE (type0);
9055 code1 = TREE_CODE (type1);
9057 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9058 /* Choose the wider of two real types. */
9059 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9060 ? type0 : type1;
9061 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9062 cmp_type = type0;
9063 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9064 cmp_type = type1;
9066 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9067 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9069 if (unordered_code == UNORDERED_EXPR)
9071 if (!HONOR_NANS (arg0))
9072 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9073 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9076 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9077 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9078 fold_build2_loc (loc, code, type, arg0, arg1));
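/* Illustrative sketch (not part of GCC): for example, isgreater (x, y)
   is folded to the negation of UNLE_EXPR, i.e. "not (x <= y or
   unordered)"; when the operands cannot be NaN the plain LE comparison
   is negated instead.  A source-level rendering:  */
#if 0
#include <math.h>

static int
isgreater_folded (double x, double y)
{
  /* UNLE (x, y) is "x <= y or unordered"; isgreater is its negation,
     so NaN operands yield 0 without raising "invalid".  */
  return !(islessequal (x, y) || isunordered (x, y));
}
#endif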
9081 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9082 arithmetics if it can never overflow, or into internal functions that
9083 return both result of arithmetics and overflowed boolean flag in
9084 a complex integer result, or some other check for overflow.
9085 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9086 checking part of that. */
9088 static tree
9089 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9090 tree arg0, tree arg1, tree arg2)
9092 enum internal_fn ifn = IFN_LAST;
9093 /* The code of the expression corresponding to the type-generic
9094 built-in, or ERROR_MARK for the type-specific ones. */
9095 enum tree_code opcode = ERROR_MARK;
9096 bool ovf_only = false;
9098 switch (fcode)
9100 case BUILT_IN_ADD_OVERFLOW_P:
9101 ovf_only = true;
9102 /* FALLTHRU */
9103 case BUILT_IN_ADD_OVERFLOW:
9104 opcode = PLUS_EXPR;
9105 /* FALLTHRU */
9106 case BUILT_IN_SADD_OVERFLOW:
9107 case BUILT_IN_SADDL_OVERFLOW:
9108 case BUILT_IN_SADDLL_OVERFLOW:
9109 case BUILT_IN_UADD_OVERFLOW:
9110 case BUILT_IN_UADDL_OVERFLOW:
9111 case BUILT_IN_UADDLL_OVERFLOW:
9112 ifn = IFN_ADD_OVERFLOW;
9113 break;
9114 case BUILT_IN_SUB_OVERFLOW_P:
9115 ovf_only = true;
9116 /* FALLTHRU */
9117 case BUILT_IN_SUB_OVERFLOW:
9118 opcode = MINUS_EXPR;
9119 /* FALLTHRU */
9120 case BUILT_IN_SSUB_OVERFLOW:
9121 case BUILT_IN_SSUBL_OVERFLOW:
9122 case BUILT_IN_SSUBLL_OVERFLOW:
9123 case BUILT_IN_USUB_OVERFLOW:
9124 case BUILT_IN_USUBL_OVERFLOW:
9125 case BUILT_IN_USUBLL_OVERFLOW:
9126 ifn = IFN_SUB_OVERFLOW;
9127 break;
9128 case BUILT_IN_MUL_OVERFLOW_P:
9129 ovf_only = true;
9130 /* FALLTHRU */
9131 case BUILT_IN_MUL_OVERFLOW:
9132 opcode = MULT_EXPR;
9133 /* FALLTHRU */
9134 case BUILT_IN_SMUL_OVERFLOW:
9135 case BUILT_IN_SMULL_OVERFLOW:
9136 case BUILT_IN_SMULLL_OVERFLOW:
9137 case BUILT_IN_UMUL_OVERFLOW:
9138 case BUILT_IN_UMULL_OVERFLOW:
9139 case BUILT_IN_UMULLL_OVERFLOW:
9140 ifn = IFN_MUL_OVERFLOW;
9141 break;
9142 default:
9143 gcc_unreachable ();
9146 /* For the "generic" overloads, the first two arguments can have different
9147 types and the last argument determines the target type to use to check
9148 for overflow. The arguments of the other overloads all have the same
9149 type. */
9150 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9152 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9153 arguments are constant, attempt to fold the built-in call into a constant
9154 expression indicating whether or not it detected an overflow. */
9155 if (ovf_only
9156 && TREE_CODE (arg0) == INTEGER_CST
9157 && TREE_CODE (arg1) == INTEGER_CST)
9158 /* Perform the computation in the target type and check for overflow. */
9159 return omit_one_operand_loc (loc, boolean_type_node,
9160 arith_overflowed_p (opcode, type, arg0, arg1)
9161 ? boolean_true_node : boolean_false_node,
9162 arg2);
9164 tree ctype = build_complex_type (type);
9165 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9166 2, arg0, arg1);
9167 tree tgt = save_expr (call);
9168 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9169 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9170 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9172 if (ovf_only)
9173 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9175 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9176 tree store
9177 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9178 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
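/* Illustrative usage (not part of GCC): the fold above models these
   builtins as an internal call returning a complex integer whose
   REALPART is the wrapped arithmetic result (stored through the
   pointer argument) and whose IMAGPART is the overflow flag.  */
#if 0
#include <stdio.h>
#include <limits.h>

int
main (void)
{
  int res;
  if (__builtin_add_overflow (INT_MAX, 1, &res))
    printf ("overflowed, wrapped result = %d\n", res);
  /* The _p variants compute only the flag; with constant operands the
     call folds to a constant, as done above via arith_overflowed_p.  */
  printf ("%d\n", __builtin_mul_overflow_p (1 << 30, 4, (int) 0));
  return 0;
}
#endif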
9181 /* Fold a call to __builtin_FILE to a constant string. */
9183 static inline tree
9184 fold_builtin_FILE (location_t loc)
9186 if (const char *fname = LOCATION_FILE (loc))
9188 /* The documentation says this builtin is equivalent to the preprocessor
9189 __FILE__ macro so it appears appropriate to use the same file prefix
9190 mappings. */
9191 fname = remap_macro_filename (fname);
9192 return build_string_literal (strlen (fname) + 1, fname);
9195 return build_string_literal (1, "");
9198 /* Fold a call to __builtin_FUNCTION to a constant string. */
9200 static inline tree
9201 fold_builtin_FUNCTION ()
9203 const char *name = "";
9205 if (current_function_decl)
9206 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9208 return build_string_literal (strlen (name) + 1, name);
9211 /* Fold a call to __builtin_LINE to an integer constant. */
9213 static inline tree
9214 fold_builtin_LINE (location_t loc, tree type)
9216 return build_int_cst (type, LOCATION_LINE (loc));
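/* Illustrative usage (not part of GCC): since the three folds above
   produce constants at the point of use, these builtins work in
   logging macros (and as C++ default arguments) where the __FILE__
   family would name the wrong location.  The LOG macro is made up.  */
#if 0
#include <stdio.h>

#define LOG(msg) \
  fprintf (stderr, "%s:%d: in %s: %s\n", __builtin_FILE (), \
	   __builtin_LINE (), __builtin_FUNCTION (), (msg))

int
main (void)
{
  LOG ("hello");   /* prints e.g. "demo.c:12: in main: hello"  */
  return 0;
}
#endif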
9219 /* Fold a call to built-in function FNDECL with 0 arguments.
9220 This function returns NULL_TREE if no simplification was possible. */
9222 static tree
9223 fold_builtin_0 (location_t loc, tree fndecl)
9225 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9226 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9227 switch (fcode)
9229 case BUILT_IN_FILE:
9230 return fold_builtin_FILE (loc);
9232 case BUILT_IN_FUNCTION:
9233 return fold_builtin_FUNCTION ();
9235 case BUILT_IN_LINE:
9236 return fold_builtin_LINE (loc, type);
9238 CASE_FLT_FN (BUILT_IN_INF):
9239 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9240 case BUILT_IN_INFD32:
9241 case BUILT_IN_INFD64:
9242 case BUILT_IN_INFD128:
9243 return fold_builtin_inf (loc, type, true);
9245 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9246 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9247 return fold_builtin_inf (loc, type, false);
9249 case BUILT_IN_CLASSIFY_TYPE:
9250 return fold_builtin_classify_type (NULL_TREE);
9252 default:
9253 break;
9255 return NULL_TREE;
9258 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9259 This function returns NULL_TREE if no simplification was possible. */
9261 static tree
9262 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9264 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9265 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9267 if (TREE_CODE (arg0) == ERROR_MARK)
9268 return NULL_TREE;
9270 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9271 return ret;
9273 switch (fcode)
9275 case BUILT_IN_CONSTANT_P:
9277 tree val = fold_builtin_constant_p (arg0);
9279 /* Gimplification will pull the CALL_EXPR for the builtin out of
9280 an if condition. When not optimizing, we'll not CSE it back.
9281 To avoid link-error regressions of that sort, return false now. */
9282 if (!val && !optimize)
9283 val = integer_zero_node;
9285 return val;
9288 case BUILT_IN_CLASSIFY_TYPE:
9289 return fold_builtin_classify_type (arg0);
9291 case BUILT_IN_STRLEN:
9292 return fold_builtin_strlen (loc, type, arg0);
9294 CASE_FLT_FN (BUILT_IN_FABS):
9295 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9296 case BUILT_IN_FABSD32:
9297 case BUILT_IN_FABSD64:
9298 case BUILT_IN_FABSD128:
9299 return fold_builtin_fabs (loc, arg0, type);
9301 case BUILT_IN_ABS:
9302 case BUILT_IN_LABS:
9303 case BUILT_IN_LLABS:
9304 case BUILT_IN_IMAXABS:
9305 return fold_builtin_abs (loc, arg0, type);
9307 CASE_FLT_FN (BUILT_IN_CONJ):
9308 if (validate_arg (arg0, COMPLEX_TYPE)
9309 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9310 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9311 break;
9313 CASE_FLT_FN (BUILT_IN_CREAL):
9314 if (validate_arg (arg0, COMPLEX_TYPE)
9315 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9316 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9317 break;
9319 CASE_FLT_FN (BUILT_IN_CIMAG):
9320 if (validate_arg (arg0, COMPLEX_TYPE)
9321 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9322 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9323 break;
9325 CASE_FLT_FN (BUILT_IN_CARG):
9326 return fold_builtin_carg (loc, arg0, type);
9328 case BUILT_IN_ISASCII:
9329 return fold_builtin_isascii (loc, arg0);
9331 case BUILT_IN_TOASCII:
9332 return fold_builtin_toascii (loc, arg0);
9334 case BUILT_IN_ISDIGIT:
9335 return fold_builtin_isdigit (loc, arg0);
9337 CASE_FLT_FN (BUILT_IN_FINITE):
9338 case BUILT_IN_FINITED32:
9339 case BUILT_IN_FINITED64:
9340 case BUILT_IN_FINITED128:
9341 case BUILT_IN_ISFINITE:
9343 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9344 if (ret)
9345 return ret;
9346 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9349 CASE_FLT_FN (BUILT_IN_ISINF):
9350 case BUILT_IN_ISINFD32:
9351 case BUILT_IN_ISINFD64:
9352 case BUILT_IN_ISINFD128:
9354 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9355 if (ret)
9356 return ret;
9357 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9360 case BUILT_IN_ISNORMAL:
9361 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9363 case BUILT_IN_ISINF_SIGN:
9364 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9366 CASE_FLT_FN (BUILT_IN_ISNAN):
9367 case BUILT_IN_ISNAND32:
9368 case BUILT_IN_ISNAND64:
9369 case BUILT_IN_ISNAND128:
9370 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9372 case BUILT_IN_FREE:
9373 if (integer_zerop (arg0))
9374 return build_empty_stmt (loc);
9375 break;
9377 default:
9378 break;
9381 return NULL_TREE;
9385 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9386 This function returns NULL_TREE if no simplification was possible. */
9388 static tree
9389 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9391 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9392 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9394 if (TREE_CODE (arg0) == ERROR_MARK
9395 || TREE_CODE (arg1) == ERROR_MARK)
9396 return NULL_TREE;
9398 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9399 return ret;
9401 switch (fcode)
9403 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9404 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9405 if (validate_arg (arg0, REAL_TYPE)
9406 && validate_arg (arg1, POINTER_TYPE))
9407 return do_mpfr_lgamma_r (arg0, arg1, type);
9408 break;
9410 CASE_FLT_FN (BUILT_IN_FREXP):
9411 return fold_builtin_frexp (loc, arg0, arg1, type);
9413 CASE_FLT_FN (BUILT_IN_MODF):
9414 return fold_builtin_modf (loc, arg0, arg1, type);
9416 case BUILT_IN_STRSPN:
9417 return fold_builtin_strspn (loc, arg0, arg1);
9419 case BUILT_IN_STRCSPN:
9420 return fold_builtin_strcspn (loc, arg0, arg1);
9422 case BUILT_IN_STRPBRK:
9423 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9425 case BUILT_IN_EXPECT:
9426 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9428 case BUILT_IN_ISGREATER:
9429 return fold_builtin_unordered_cmp (loc, fndecl,
9430 arg0, arg1, UNLE_EXPR, LE_EXPR);
9431 case BUILT_IN_ISGREATEREQUAL:
9432 return fold_builtin_unordered_cmp (loc, fndecl,
9433 arg0, arg1, UNLT_EXPR, LT_EXPR);
9434 case BUILT_IN_ISLESS:
9435 return fold_builtin_unordered_cmp (loc, fndecl,
9436 arg0, arg1, UNGE_EXPR, GE_EXPR);
9437 case BUILT_IN_ISLESSEQUAL:
9438 return fold_builtin_unordered_cmp (loc, fndecl,
9439 arg0, arg1, UNGT_EXPR, GT_EXPR);
9440 case BUILT_IN_ISLESSGREATER:
9441 return fold_builtin_unordered_cmp (loc, fndecl,
9442 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9443 case BUILT_IN_ISUNORDERED:
9444 return fold_builtin_unordered_cmp (loc, fndecl,
9445 arg0, arg1, UNORDERED_EXPR,
9446 NOP_EXPR);
9448 /* We do the folding for va_start in the expander. */
9449 case BUILT_IN_VA_START:
9450 break;
9452 case BUILT_IN_OBJECT_SIZE:
9453 return fold_builtin_object_size (arg0, arg1);
9455 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9456 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9458 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9459 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9461 default:
9462 break;
9464 return NULL_TREE;
9467 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9468 and ARG2.
9469 This function returns NULL_TREE if no simplification was possible. */
9471 static tree
9472 fold_builtin_3 (location_t loc, tree fndecl,
9473 tree arg0, tree arg1, tree arg2)
9475 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9476 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9478 if (TREE_CODE (arg0) == ERROR_MARK
9479 || TREE_CODE (arg1) == ERROR_MARK
9480 || TREE_CODE (arg2) == ERROR_MARK)
9481 return NULL_TREE;
9483 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9484 arg0, arg1, arg2))
9485 return ret;
9487 switch (fcode)
9490 CASE_FLT_FN (BUILT_IN_SINCOS):
9491 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9493 CASE_FLT_FN (BUILT_IN_REMQUO):
9494 if (validate_arg (arg0, REAL_TYPE)
9495 && validate_arg (arg1, REAL_TYPE)
9496 && validate_arg (arg2, POINTER_TYPE))
9497 return do_mpfr_remquo (arg0, arg1, arg2);
9498 break;
9500 case BUILT_IN_MEMCMP:
9501 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9503 case BUILT_IN_EXPECT:
9504 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9506 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9507 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9509 case BUILT_IN_ADD_OVERFLOW:
9510 case BUILT_IN_SUB_OVERFLOW:
9511 case BUILT_IN_MUL_OVERFLOW:
9512 case BUILT_IN_ADD_OVERFLOW_P:
9513 case BUILT_IN_SUB_OVERFLOW_P:
9514 case BUILT_IN_MUL_OVERFLOW_P:
9515 case BUILT_IN_SADD_OVERFLOW:
9516 case BUILT_IN_SADDL_OVERFLOW:
9517 case BUILT_IN_SADDLL_OVERFLOW:
9518 case BUILT_IN_SSUB_OVERFLOW:
9519 case BUILT_IN_SSUBL_OVERFLOW:
9520 case BUILT_IN_SSUBLL_OVERFLOW:
9521 case BUILT_IN_SMUL_OVERFLOW:
9522 case BUILT_IN_SMULL_OVERFLOW:
9523 case BUILT_IN_SMULLL_OVERFLOW:
9524 case BUILT_IN_UADD_OVERFLOW:
9525 case BUILT_IN_UADDL_OVERFLOW:
9526 case BUILT_IN_UADDLL_OVERFLOW:
9527 case BUILT_IN_USUB_OVERFLOW:
9528 case BUILT_IN_USUBL_OVERFLOW:
9529 case BUILT_IN_USUBLL_OVERFLOW:
9530 case BUILT_IN_UMUL_OVERFLOW:
9531 case BUILT_IN_UMULL_OVERFLOW:
9532 case BUILT_IN_UMULLL_OVERFLOW:
9533 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9535 default:
9536 break;
9538 return NULL_TREE;
9541 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9542 arguments. The trailing bool parameter (historically IGNORE, true when
9543 the call's result is ignored) is unused here. This function returns NULL_TREE if no
9544 simplification was possible. */
9546 tree
9547 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9549 tree ret = NULL_TREE;
9551 switch (nargs)
9553 case 0:
9554 ret = fold_builtin_0 (loc, fndecl);
9555 break;
9556 case 1:
9557 ret = fold_builtin_1 (loc, fndecl, args[0]);
9558 break;
9559 case 2:
9560 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9561 break;
9562 case 3:
9563 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9564 break;
9565 default:
9566 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9567 break;
9569 if (ret)
9571 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9572 SET_EXPR_LOCATION (ret, loc);
9573 return ret;
9575 return NULL_TREE;
9578 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9579 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9580 of arguments in ARGS to be omitted. OLDNARGS is the number of
9581 elements in ARGS. */
9583 static tree
9584 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9585 int skip, tree fndecl, int n, va_list newargs)
9587 int nargs = oldnargs - skip + n;
9588 tree *buffer;
9590 if (n > 0)
9592 int i, j;
9594 buffer = XALLOCAVEC (tree, nargs);
9595 for (i = 0; i < n; i++)
9596 buffer[i] = va_arg (newargs, tree);
9597 for (j = skip; j < oldnargs; j++, i++)
9598 buffer[i] = args[j];
9600 else
9601 buffer = args + skip;
9603 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9606 /* Return true if FNDECL shouldn't be folded right now.
9607 If a built-in function has an inline attribute always_inline
9608 wrapper, defer folding it until after always_inline functions have
9609 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9610 might not be performed. */
9612 bool
9613 avoid_folding_inline_builtin (tree fndecl)
9615 return (DECL_DECLARED_INLINE_P (fndecl)
9616 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9617 && cfun
9618 && !cfun->always_inline_functions_inlined
9619 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
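/* Illustrative sketch (not part of GCC; the wrapper below is a made-up
   stand-in for glibc's _FORTIFY_SOURCE headers): if memcpy were folded
   before this always_inline wrapper is inlined, the object-size check
   the wrapper exists to insert would be skipped, which is what
   avoid_folding_inline_builtin prevents.  */
#if 0
#include <stddef.h>

extern void *memcpy (void *, const void *, size_t);

/* Hypothetical fortified wrapper in the style of <string.h>.  */
extern __inline __attribute__ ((always_inline, gnu_inline, artificial))
void *
memcpy (void *dst, const void *src, size_t n)
{
  return __builtin___memcpy_chk (dst, src, n,
				 __builtin_object_size (dst, 0));
}
#endif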
9622 /* A wrapper function for builtin folding that prevents warnings for
9623 "statement without effect" and the like, caused by removing the
9624 call node earlier than the warning is generated. */
9626 tree
9627 fold_call_expr (location_t loc, tree exp, bool ignore)
9629 tree ret = NULL_TREE;
9630 tree fndecl = get_callee_fndecl (exp);
9631 if (fndecl
9632 && TREE_CODE (fndecl) == FUNCTION_DECL
9633 && DECL_BUILT_IN (fndecl)
9634 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9635 yet. Defer folding until we see all the arguments
9636 (after inlining). */
9637 && !CALL_EXPR_VA_ARG_PACK (exp))
9639 int nargs = call_expr_nargs (exp);
9641 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9642 instead last argument is __builtin_va_arg_pack (). Defer folding
9643 even in that case, until arguments are finalized. */
9644 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9646 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9647 if (fndecl2
9648 && TREE_CODE (fndecl2) == FUNCTION_DECL
9649 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9650 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9651 return NULL_TREE;
9654 if (avoid_folding_inline_builtin (fndecl))
9655 return NULL_TREE;
9657 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9658 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9659 CALL_EXPR_ARGP (exp), ignore);
9660 else
9662 tree *args = CALL_EXPR_ARGP (exp);
9663 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9664 if (ret)
9665 return ret;
9668 return NULL_TREE;
9671 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9672 N arguments are passed in the array ARGARRAY. Return a folded
9673 expression or NULL_TREE if no simplification was possible. */
9675 tree
9676 fold_builtin_call_array (location_t loc, tree,
9677 tree fn,
9678 int n,
9679 tree *argarray)
9681 if (TREE_CODE (fn) != ADDR_EXPR)
9682 return NULL_TREE;
9684 tree fndecl = TREE_OPERAND (fn, 0);
9685 if (TREE_CODE (fndecl) == FUNCTION_DECL
9686 && DECL_BUILT_IN (fndecl))
9688 /* If last argument is __builtin_va_arg_pack (), arguments to this
9689 function are not finalized yet. Defer folding until they are. */
9690 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9692 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9693 if (fndecl2
9694 && TREE_CODE (fndecl2) == FUNCTION_DECL
9695 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9696 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9697 return NULL_TREE;
9699 if (avoid_folding_inline_builtin (fndecl))
9700 return NULL_TREE;
9701 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9702 return targetm.fold_builtin (fndecl, n, argarray, false);
9703 else
9704 return fold_builtin_n (loc, fndecl, argarray, n, false);
9707 return NULL_TREE;
9710 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9711 along with N new arguments specified as the "..." parameters. SKIP
9712 is the number of arguments in EXP to be omitted. This function is used
9713 to do varargs-to-varargs transformations. */
9715 static tree
9716 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9718 va_list ap;
9719 tree t;
9721 va_start (ap, n);
9722 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9723 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9724 va_end (ap);
9726 return t;
9729 /* Validate a single argument ARG against a tree code CODE representing
9730 a type. Return true when argument is valid. */
9732 static bool
9733 validate_arg (const_tree arg, enum tree_code code)
9735 if (!arg)
9736 return false;
9737 else if (code == POINTER_TYPE)
9738 return POINTER_TYPE_P (TREE_TYPE (arg));
9739 else if (code == INTEGER_TYPE)
9740 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9741 return code == TREE_CODE (TREE_TYPE (arg));
9744 /* This function validates the types of a function call argument list
9745 against a specified list of tree_codes. If the last specifier is a 0,
9746 that represents an ellipsis; otherwise the last specifier must be a
9747 VOID_TYPE.
9749 This is the GIMPLE version of validate_arglist. Eventually we want to
9750 completely convert builtins.c to work from GIMPLEs and the tree based
9751 validate_arglist will then be removed. */
9753 bool
9754 validate_gimple_arglist (const gcall *call, ...)
9756 enum tree_code code;
9757 bool res = 0;
9758 va_list ap;
9759 const_tree arg;
9760 size_t i;
9762 va_start (ap, call);
9763 i = 0;
9765 do
9767 code = (enum tree_code) va_arg (ap, int);
9768 switch (code)
9770 case 0:
9771 /* This signifies an ellipsis; any further arguments are all ok. */
9772 res = true;
9773 goto end;
9774 case VOID_TYPE:
9775 /* This signifies an endlink, if no arguments remain, return
9776 true, otherwise return false. */
9777 res = (i == gimple_call_num_args (call));
9778 goto end;
9779 default:
9780 /* If no parameters remain or the parameter's code does not
9781 match the specified code, return false. Otherwise continue
9782 checking any remaining arguments. */
9783 arg = gimple_call_arg (call, i++);
9784 if (!validate_arg (arg, code))
9785 goto end;
9786 break;
9789 while (1);
9791 /* We need gotos here since we can only have one va_end in a
9792 function. */
9793 end: ;
9794 va_end (ap);
9796 return res;
9799 /* Default target-specific builtin expander that does nothing. */
9801 rtx
9802 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9803 rtx target ATTRIBUTE_UNUSED,
9804 rtx subtarget ATTRIBUTE_UNUSED,
9805 machine_mode mode ATTRIBUTE_UNUSED,
9806 int ignore ATTRIBUTE_UNUSED)
9808 return NULL_RTX;
9811 /* Returns true if EXP represents data that would potentially reside
9812 in a readonly section. */
9814 bool
9815 readonly_data_expr (tree exp)
9817 STRIP_NOPS (exp);
9819 if (TREE_CODE (exp) != ADDR_EXPR)
9820 return false;
9822 exp = get_base_address (TREE_OPERAND (exp, 0));
9823 if (!exp)
9824 return false;
9826 /* Make sure we call decl_readonly_section only for trees it
9827 can handle (since it returns true for everything it doesn't
9828 understand). */
9829 if (TREE_CODE (exp) == STRING_CST
9830 || TREE_CODE (exp) == CONSTRUCTOR
9831 || (VAR_P (exp) && TREE_STATIC (exp)))
9832 return decl_readonly_section (exp, 0);
9833 else
9834 return false;
9837 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9838 to the call, and TYPE is its return type.
9840 Return NULL_TREE if no simplification was possible, otherwise return the
9841 simplified form of the call as a tree.
9843 The simplified form may be a constant or other expression which
9844 computes the same value, but in a more efficient manner (including
9845 calls to other builtin functions).
9847 The call may contain arguments which need to be evaluated, but
9848 which are not useful to determine the result of the call. In
9849 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9850 COMPOUND_EXPR will be an argument which must be evaluated.
9851 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9852 COMPOUND_EXPR in the chain will contain the tree for the simplified
9853 form of the builtin function call. */
9855 static tree
9856 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9858 if (!validate_arg (s1, POINTER_TYPE)
9859 || !validate_arg (s2, POINTER_TYPE))
9860 return NULL_TREE;
9861 else
9863 tree fn;
9864 const char *p1, *p2;
9866 p2 = c_getstr (s2);
9867 if (p2 == NULL)
9868 return NULL_TREE;
9870 p1 = c_getstr (s1);
9871 if (p1 != NULL)
9873 const char *r = strpbrk (p1, p2);
9874 tree tem;
9876 if (r == NULL)
9877 return build_int_cst (TREE_TYPE (s1), 0);
9879 /* Return an offset into the constant string argument. */
9880 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9881 return fold_convert_loc (loc, type, tem);
9884 if (p2[0] == '\0')
9885 /* strpbrk(x, "") == NULL.
9886 Evaluate and ignore s1 in case it had side-effects. */
9887 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9889 if (p2[1] != '\0')
9890 return NULL_TREE; /* Really call strpbrk. */
9892 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9893 if (!fn)
9894 return NULL_TREE;
9896 /* New argument list transforming strpbrk(s1, s2) to
9897 strchr(s1, s2[0]). */
9898 return build_call_expr_loc (loc, fn, 2, s1,
9899 build_int_cst (integer_type_node, p2[0]));
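/* Illustrative examples (not part of GCC) of the strpbrk folds above;
   the function name is made up.  */
#if 0
#include <string.h>

const char *
strpbrk_demo (const char *s)
{
  const char *a = strpbrk ("hello", "lo");  /* constant-folded to "llo" */
  const char *b = strpbrk (s, "");	    /* folded to NULL, s evaluated */
  (void) a; (void) b;
  return strpbrk (s, "/");		    /* becomes strchr (s, '/') */
}
#endif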
9903 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9904 to the call.
9906 Return NULL_TREE if no simplification was possible, otherwise return the
9907 simplified form of the call as a tree.
9909 The simplified form may be a constant or other expression which
9910 computes the same value, but in a more efficient manner (including
9911 calls to other builtin functions).
9913 The call may contain arguments which need to be evaluated, but
9914 which are not useful to determine the result of the call. In
9915 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9916 COMPOUND_EXPR will be an argument which must be evaluated.
9917 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9918 COMPOUND_EXPR in the chain will contain the tree for the simplified
9919 form of the builtin function call. */
9921 static tree
9922 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9924 if (!validate_arg (s1, POINTER_TYPE)
9925 || !validate_arg (s2, POINTER_TYPE))
9926 return NULL_TREE;
9927 else
9929 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9931 /* If either argument is "", return NULL_TREE. */
9932 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9933 /* Evaluate and ignore both arguments in case either one has
9934 side-effects. */
9935 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9936 s1, s2);
9937 return NULL_TREE;
9941 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9942 to the call.
9944 Return NULL_TREE if no simplification was possible, otherwise return the
9945 simplified form of the call as a tree.
9947 The simplified form may be a constant or other expression which
9948 computes the same value, but in a more efficient manner (including
9949 calls to other builtin functions).
9951 The call may contain arguments which need to be evaluated, but
9952 which are not useful to determine the result of the call. In
9953 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9954 COMPOUND_EXPR will be an argument which must be evaluated.
9955 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9956 COMPOUND_EXPR in the chain will contain the tree for the simplified
9957 form of the builtin function call. */
9959 static tree
9960 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9962 if (!validate_arg (s1, POINTER_TYPE)
9963 || !validate_arg (s2, POINTER_TYPE))
9964 return NULL_TREE;
9965 else
9967 /* If the first argument is "", return NULL_TREE. */
9968 const char *p1 = c_getstr (s1);
9969 if (p1 && *p1 == '\0')
9971 /* Evaluate and ignore argument s2 in case it has
9972 side-effects. */
9973 return omit_one_operand_loc (loc, size_type_node,
9974 size_zero_node, s2);
9977 /* If the second argument is "", return __builtin_strlen(s1). */
9978 const char *p2 = c_getstr (s2);
9979 if (p2 && *p2 == '\0')
9981 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9983 /* If the replacement _DECL isn't initialized, don't do the
9984 transformation. */
9985 if (!fn)
9986 return NULL_TREE;
9988 return build_call_expr_loc (loc, fn, 1, s1);
9990 return NULL_TREE;
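/* Illustrative examples (not part of GCC) of the strspn/strcspn folds
   above; the function name is made up.  */
#if 0
#include <string.h>

size_t
span_demo (const char *s)
{
  size_t a = strspn (s, "");	/* folded to 0, s still evaluated */
  size_t b = strspn ("", s);	/* likewise folded to 0 */
  size_t c = strcspn ("", s);	/* folded to 0 */
  size_t d = strcspn (s, "");	/* becomes strlen (s) */
  return a + b + c + d;
}
#endif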
9994 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9995 produced. False otherwise. This is done so that we don't output the error
9996 or warning twice or three times. */
9998 bool
9999 fold_builtin_next_arg (tree exp, bool va_start_p)
10001 tree fntype = TREE_TYPE (current_function_decl);
10002 int nargs = call_expr_nargs (exp);
10003 tree arg;
10004 /* There is a good chance the current input_location points inside the
10005 definition of the va_start macro (perhaps on the token for the
10006 builtin) in a system header, so warnings will not be emitted.
10007 Use the location in real source code. */
10008 source_location current_location =
10009 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10010 NULL);
10012 if (!stdarg_p (fntype))
10014 error ("%<va_start%> used in function with fixed args");
10015 return true;
10018 if (va_start_p)
10020 if (va_start_p && (nargs != 2))
10022 error ("wrong number of arguments to function %<va_start%>");
10023 return true;
10025 arg = CALL_EXPR_ARG (exp, 1);
10027 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10028 when we checked the arguments and if needed issued a warning. */
10029 else
10031 if (nargs == 0)
10033 /* Evidently an out of date version of <stdarg.h>; can't validate
10034 va_start's second argument, but can still work as intended. */
10035 warning_at (current_location,
10036 OPT_Wvarargs,
10037 "%<__builtin_next_arg%> called without an argument");
10038 return true;
10040 else if (nargs > 1)
10042 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10043 return true;
10045 arg = CALL_EXPR_ARG (exp, 0);
10048 if (TREE_CODE (arg) == SSA_NAME)
10049 arg = SSA_NAME_VAR (arg);
10051 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10052 or __builtin_next_arg (0) the first time we see it, after checking
10053 the arguments and if needed issuing a warning. */
10054 if (!integer_zerop (arg))
10056 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10058 /* Strip off all nops for the sake of the comparison. This
10059 is not quite the same as STRIP_NOPS. It does more.
10060 We must also strip off INDIRECT_EXPR for C++ reference
10061 parameters. */
10062 while (CONVERT_EXPR_P (arg)
10063 || TREE_CODE (arg) == INDIRECT_REF)
10064 arg = TREE_OPERAND (arg, 0);
10065 if (arg != last_parm)
10067 /* FIXME: Sometimes the tree optimizers hand us something other
10068 than the last argument even though the user did use the last
10069 argument. We just warn and treat the arg as the last
10070 one, so the generated code may be wrong because of
10071 it. */
10072 warning_at (current_location,
10073 OPT_Wvarargs,
10074 "second parameter of %<va_start%> not last named argument");
10077 /* Undefined by C99 7.15.1.4p4 (va_start):
10078 "If the parameter parmN is declared with the register storage
10079 class, with a function or array type, or with a type that is
10080 not compatible with the type that results after application of
10081 the default argument promotions, the behavior is undefined." */
10083 else if (DECL_REGISTER (arg))
10085 warning_at (current_location,
10086 OPT_Wvarargs,
10087 "undefined behavior when second parameter of "
10088 "%<va_start%> is declared with %<register%> storage");
10091 /* We want to verify the second parameter just once before the tree
10092 optimizers are run and then avoid keeping it in the tree,
10093 as otherwise we could warn even for correct code like:
10094 void foo (int i, ...)
10095 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10096 if (va_start_p)
10097 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10098 else
10099 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10101 return false;
10105 /* Expand a call EXP to __builtin_object_size. */
10107 static rtx
10108 expand_builtin_object_size (tree exp)
10110 tree ost;
10111 int object_size_type;
10112 tree fndecl = get_callee_fndecl (exp);
10114 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10116 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10117 exp, fndecl);
10118 expand_builtin_trap ();
10119 return const0_rtx;
10122 ost = CALL_EXPR_ARG (exp, 1);
10123 STRIP_NOPS (ost);
10125 if (TREE_CODE (ost) != INTEGER_CST
10126 || tree_int_cst_sgn (ost) < 0
10127 || compare_tree_int (ost, 3) > 0)
10129 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10130 exp, fndecl);
10131 expand_builtin_trap ();
10132 return const0_rtx;
10135 object_size_type = tree_to_shwi (ost);
10137 return object_size_type < 2 ? constm1_rtx : const0_rtx;
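/* Illustrative usage (not part of GCC): for a pointer whose object
   cannot be determined, types 0 and 1 yield (size_t) -1 and types 2
   and 3 yield 0, exactly the constants expanded above.  The helper
   name is made up.  */
#if 0
#include <stdio.h>

static char buf[64];

size_t
unknown_max (void *q)
{
  /* Folds to (size_t) -1 when the pointed-to object is unknown.  */
  return __builtin_object_size (q, 0);
}

int
main (void)
{
  printf ("%zu\n", __builtin_object_size (buf + 8, 0));	/* 56 */
  return 0;
}
#endif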
10140 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10141 FCODE is the BUILT_IN_* to use.
10142 Return NULL_RTX if we failed; the caller should emit a normal call,
10143 otherwise try to get the result in TARGET, if convenient (and in
10144 mode MODE if that's convenient). */
10146 static rtx
10147 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10148 enum built_in_function fcode)
10150 if (!validate_arglist (exp,
10151 POINTER_TYPE,
10152 fcode == BUILT_IN_MEMSET_CHK
10153 ? INTEGER_TYPE : POINTER_TYPE,
10154 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10155 return NULL_RTX;
10157 tree dest = CALL_EXPR_ARG (exp, 0);
10158 tree src = CALL_EXPR_ARG (exp, 1);
10159 tree len = CALL_EXPR_ARG (exp, 2);
10160 tree size = CALL_EXPR_ARG (exp, 3);
10162 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10163 /*str=*/NULL_TREE, size);
10165 if (!tree_fits_uhwi_p (size))
10166 return NULL_RTX;
10168 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10170 /* Avoid transforming the checking call to an ordinary one when
10171 an overflow has been detected or when the call couldn't be
10172 validated because the size is not constant. */
10173 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10174 return NULL_RTX;
10176 tree fn = NULL_TREE;
10177 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10178 mem{cpy,pcpy,move,set} is available. */
10179 switch (fcode)
10181 case BUILT_IN_MEMCPY_CHK:
10182 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10183 break;
10184 case BUILT_IN_MEMPCPY_CHK:
10185 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10186 break;
10187 case BUILT_IN_MEMMOVE_CHK:
10188 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10189 break;
10190 case BUILT_IN_MEMSET_CHK:
10191 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10192 break;
10193 default:
10194 break;
10197 if (! fn)
10198 return NULL_RTX;
10200 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10201 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10202 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10203 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10205 else if (fcode == BUILT_IN_MEMSET_CHK)
10206 return NULL_RTX;
10207 else
10209 unsigned int dest_align = get_pointer_alignment (dest);
10211 /* If DEST is not a pointer type, call the normal function. */
10212 if (dest_align == 0)
10213 return NULL_RTX;
10215 /* If SRC and DEST are the same (and not volatile), do nothing. */
10216 if (operand_equal_p (src, dest, 0))
10218 tree expr;
10220 if (fcode != BUILT_IN_MEMPCPY_CHK)
10222 /* Evaluate and ignore LEN in case it has side-effects. */
10223 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10224 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10227 expr = fold_build_pointer_plus (dest, len);
10228 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10231 /* __memmove_chk special case. */
10232 if (fcode == BUILT_IN_MEMMOVE_CHK)
10234 unsigned int src_align = get_pointer_alignment (src);
10236 if (src_align == 0)
10237 return NULL_RTX;
10239 /* If src is categorized for a readonly section we can use
10240 normal __memcpy_chk. */
10241 if (readonly_data_expr (src))
10243 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10244 if (!fn)
10245 return NULL_RTX;
10246 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10247 dest, src, len, size);
10248 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10249 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10250 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10253 return NULL_RTX;
10257 /* Emit warning if a buffer overflow is detected at compile time. */
10259 static void
10260 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10262 /* The source string. */
10263 tree srcstr = NULL_TREE;
10264 /* The size of the destination object. */
10265 tree objsize = NULL_TREE;
10266 /* The string that is being concatenated with (as in __strcat_chk)
10267 or null if it isn't. */
10268 tree catstr = NULL_TREE;
10269 /* The maximum length of the source sequence in a bounded operation
10270 (such as __strncat_chk) or null if the operation isn't bounded
10271 (such as __strcat_chk). */
10272 tree maxread = NULL_TREE;
10273 /* The exact size of the access (such as in __strncpy_chk). */
10274 tree size = NULL_TREE;
10276 switch (fcode)
10278 case BUILT_IN_STRCPY_CHK:
10279 case BUILT_IN_STPCPY_CHK:
10280 srcstr = CALL_EXPR_ARG (exp, 1);
10281 objsize = CALL_EXPR_ARG (exp, 2);
10282 break;
10284 case BUILT_IN_STRCAT_CHK:
10285 /* For __strcat_chk the warning will be emitted only if overflowing
10286 by at least strlen (dest) + 1 bytes. */
10287 catstr = CALL_EXPR_ARG (exp, 0);
10288 srcstr = CALL_EXPR_ARG (exp, 1);
10289 objsize = CALL_EXPR_ARG (exp, 2);
10290 break;
10292 case BUILT_IN_STRNCAT_CHK:
10293 catstr = CALL_EXPR_ARG (exp, 0);
10294 srcstr = CALL_EXPR_ARG (exp, 1);
10295 maxread = CALL_EXPR_ARG (exp, 2);
10296 objsize = CALL_EXPR_ARG (exp, 3);
10297 break;
10299 case BUILT_IN_STRNCPY_CHK:
10300 case BUILT_IN_STPNCPY_CHK:
10301 srcstr = CALL_EXPR_ARG (exp, 1);
10302 size = CALL_EXPR_ARG (exp, 2);
10303 objsize = CALL_EXPR_ARG (exp, 3);
10304 break;
10306 case BUILT_IN_SNPRINTF_CHK:
10307 case BUILT_IN_VSNPRINTF_CHK:
10308 maxread = CALL_EXPR_ARG (exp, 1);
10309 objsize = CALL_EXPR_ARG (exp, 3);
10310 break;
10311 default:
10312 gcc_unreachable ();
10315 if (catstr && maxread)
10317 /* Check __strncat_chk. There is no way to determine the length
10318 of the string to which the source string is being appended, so
10319 just warn when the length of the source string is not known. */
10320 check_strncat_sizes (exp, objsize);
10321 return;
10324 /* The destination argument is the first one for all built-ins above. */
10325 tree dst = CALL_EXPR_ARG (exp, 0);
10327 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10330 /* Emit warning if a buffer overflow is detected at compile time
10331 in __sprintf_chk/__vsprintf_chk calls. */
10333 static void
10334 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10336 tree size, len, fmt;
10337 const char *fmt_str;
10338 int nargs = call_expr_nargs (exp);
10340 /* Verify the required arguments in the original call. */
10342 if (nargs < 4)
10343 return;
10344 size = CALL_EXPR_ARG (exp, 2);
10345 fmt = CALL_EXPR_ARG (exp, 3);
10347 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10348 return;
10350 /* Check whether the format is a literal string constant. */
10351 fmt_str = c_getstr (fmt);
10352 if (fmt_str == NULL)
10353 return;
10355 if (!init_target_chars ())
10356 return;
10358 /* If the format doesn't contain % args or %%, we know its size. */
10359 if (strchr (fmt_str, target_percent) == 0)
10360 len = build_int_cstu (size_type_node, strlen (fmt_str));
10361 /* If the format is "%s" and first ... argument is a string literal,
10362 we know it too. */
10363 else if (fcode == BUILT_IN_SPRINTF_CHK
10364 && strcmp (fmt_str, target_percent_s) == 0)
10366 tree arg;
10368 if (nargs < 5)
10369 return;
10370 arg = CALL_EXPR_ARG (exp, 4);
10371 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10372 return;
10374 len = c_strlen (arg, 1);
10375 if (!len || ! tree_fits_uhwi_p (len))
10376 return;
10378 else
10379 return;
10381 /* Add one for the terminating nul. */
10382 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10384 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10385 /*maxread=*/NULL_TREE, len, size);
10388 /* Emit warning if a free is called with address of a variable. */
10390 static void
10391 maybe_emit_free_warning (tree exp)
10393 tree arg = CALL_EXPR_ARG (exp, 0);
10395 STRIP_NOPS (arg);
10396 if (TREE_CODE (arg) != ADDR_EXPR)
10397 return;
10399 arg = get_base_address (TREE_OPERAND (arg, 0));
10400 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10401 return;
10403 if (SSA_VAR_P (arg))
10404 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10405 "%Kattempt to free a non-heap object %qD", exp, arg);
10406 else
10407 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10408 "%Kattempt to free a non-heap object", exp);
10411 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10412 if possible. */
10414 static tree
10415 fold_builtin_object_size (tree ptr, tree ost)
10417 unsigned HOST_WIDE_INT bytes;
10418 int object_size_type;
10420 if (!validate_arg (ptr, POINTER_TYPE)
10421 || !validate_arg (ost, INTEGER_TYPE))
10422 return NULL_TREE;
10424 STRIP_NOPS (ost);
10426 if (TREE_CODE (ost) != INTEGER_CST
10427 || tree_int_cst_sgn (ost) < 0
10428 || compare_tree_int (ost, 3) > 0)
10429 return NULL_TREE;
10431 object_size_type = tree_to_shwi (ost);
10433 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10434 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10435 and (size_t) 0 for types 2 and 3. */
10436 if (TREE_SIDE_EFFECTS (ptr))
10437 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10439 if (TREE_CODE (ptr) == ADDR_EXPR)
10441 compute_builtin_object_size (ptr, object_size_type, &bytes);
10442 if (wi::fits_to_tree_p (bytes, size_type_node))
10443 return build_int_cstu (size_type_node, bytes);
10445 else if (TREE_CODE (ptr) == SSA_NAME)
10447 /* If object size is not known yet, delay folding until
10448 later. Maybe subsequent passes will help determining
10449 it. */
10450 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10451 && wi::fits_to_tree_p (bytes, size_type_node))
10452 return build_int_cstu (size_type_node, bytes);
10455 return NULL_TREE;
10458 /* Builtins with folding operations that operate on "..." arguments
10459 need special handling; we need to store the arguments in a convenient
10460 data structure before attempting any folding. Fortunately there are
10461 only a few builtins that fall into this category. FNDECL is the
10462 function, EXP is the CALL_EXPR for the call. */
10464 static tree
10465 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10467 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10468 tree ret = NULL_TREE;
10470 switch (fcode)
10472 case BUILT_IN_FPCLASSIFY:
10473 ret = fold_builtin_fpclassify (loc, args, nargs);
10474 break;
10476 default:
10477 break;
10479 if (ret)
10481 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10482 SET_EXPR_LOCATION (ret, loc);
10483 TREE_NO_WARNING (ret) = 1;
10484 return ret;
10486 return NULL_TREE;
10489 /* Initialize format string characters in the target charset. */
10491 bool
10492 init_target_chars (void)
10494 static bool init;
10495 if (!init)
10497 target_newline = lang_hooks.to_target_charset ('\n');
10498 target_percent = lang_hooks.to_target_charset ('%');
10499 target_c = lang_hooks.to_target_charset ('c');
10500 target_s = lang_hooks.to_target_charset ('s');
10501 if (target_newline == 0 || target_percent == 0 || target_c == 0
10502 || target_s == 0)
10503 return false;
10505 target_percent_c[0] = target_percent;
10506 target_percent_c[1] = target_c;
10507 target_percent_c[2] = '\0';
10509 target_percent_s[0] = target_percent;
10510 target_percent_s[1] = target_s;
10511 target_percent_s[2] = '\0';
10513 target_percent_s_newline[0] = target_percent;
10514 target_percent_s_newline[1] = target_s;
10515 target_percent_s_newline[2] = target_newline;
10516 target_percent_s_newline[3] = '\0';
10518 init = true;
10520 return true;
10523 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10524 and no overflow/underflow occurred. INEXACT is true if M was not
10525 exactly calculated. TYPE is the tree type for the result. This
10526 function assumes that you cleared the MPFR flags and then
10527 calculated M to see if anything subsequently set a flag prior to
10528 entering this function. Return NULL_TREE if any checks fail. */
10530 static tree
10531 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10533 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10534 overflow/underflow occurred. If -frounding-math, proceed iff the
10535 result of calling FUNC was exact. */
10536 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10537 && (!flag_rounding_math || !inexact))
10539 REAL_VALUE_TYPE rr;
10541 real_from_mpfr (&rr, m, type, GMP_RNDN);
10542 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10543 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10544 but the mpfr_t is not, then we underflowed in the
10545 conversion. */
10546 if (real_isfinite (&rr)
10547 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10549 REAL_VALUE_TYPE rmode;
10551 real_convert (&rmode, TYPE_MODE (type), &rr);
10552 /* Proceed iff the specified mode can hold the value. */
10553 if (real_identical (&rmode, &rr))
10554 return build_real (type, rmode);
10557 return NULL_TREE;
10560 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10561 number and no overflow/underflow occurred. INEXACT is true if M
10562 was not exactly calculated. TYPE is the tree type for the result.
10563 This function assumes that you cleared the MPFR flags and then
10564 calculated M to see if anything subsequently set a flag prior to
10565 entering this function. Return NULL_TREE if any checks fail, if
10566 FORCE_CONVERT is true, then bypass the checks. */
10568 static tree
10569 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10571 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10572 overflow/underflow occurred. If -frounding-math, proceed iff the
10573 result of calling FUNC was exact. */
10574 if (force_convert
10575 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10576 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10577 && (!flag_rounding_math || !inexact)))
10579 REAL_VALUE_TYPE re, im;
10581 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10582 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10583 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10584 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10585 but the mpfr_t is not, then we underflowed in the
10586 conversion. */
10587 if (force_convert
10588 || (real_isfinite (&re) && real_isfinite (&im)
10589 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10590 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10592 REAL_VALUE_TYPE re_mode, im_mode;
10594 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10595 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10596 /* Proceed iff the specified mode can hold the value. */
10597 if (force_convert
10598 || (real_identical (&re_mode, &re)
10599 && real_identical (&im_mode, &im)))
10600 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10601 build_real (TREE_TYPE (type), im_mode));
10604 return NULL_TREE;
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold, leaving one bit
                 for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

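/* Worked example (illustrative, not part of the original source):
   with both arguments constant,

     int quo;
     double rem = __builtin_remquo (10.0, 3.0, &quo);

   can fold here to the COMPOUND_EXPR (quo = 3, 1.0): mpfr_remquo
   rounds 10.0/3.0 to the nearest integer 3 and yields the exact
   remainder 10.0 - 3 * 3.0 == 1.0.  */
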
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

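/* Worked example (illustrative): for a constant argument that is
   neither zero nor a negative integer,

     int sg;
     double lg = __builtin_lgamma_r (-0.5, &sg);

   may fold to (sg = -1, 1.2655...), since gamma(-0.5) == -2*sqrt(pi)
   is negative and lgamma returns log(|gamma(-0.5)|) ~= 1.2655.  */
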
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

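/* Hedged usage sketch (illustrative): this helper backs two-argument
   complex folds such as cpow, with FUNC == mpc_pow.  Given constant
   operands,

     _Complex double z = __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i);

   MPC evaluates (1+i)^2 == 2i at the precision of the target format,
   so the call may fold to the COMPLEX_CST 0.0 + 2.0i.  */
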
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

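/* Illustrative note (a sketch, not part of the original source): for
   a GIMPLE call such as

     _1 = __builtin_sqrt (4.0);

   fold_builtin_n would typically return the REAL_CST 2.0, and the
   code above copies the call's location onto that replacement so
   later diagnostics still point at the original call site.  */
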
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

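/* Usage sketch (illustrative): this handles user renamings such as

     extern int ffs (int) __asm__ ("my_ffs");

   Besides renaming the builtin's decl, the BUILT_IN_FFS special case
   above redirects the "ffs" libcall used when expanding the ffs
   optab, so synthesized libcalls also reference the user's symbol.  */
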
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

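/* For example (illustrative): __builtin_expect is "simple" because it
   expands to nothing beyond its first argument, and
   __builtin_return_address is a single register or stack read.  */
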
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

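/* Illustrative note: callers use this predicate when costing code;
   e.g. a loop body whose only call is __builtin_popcount (x) can be
   treated as cheap, since popcount usually expands inline to a short
   instruction sequence rather than a real call.  */
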
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}

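/* Hedged sketch (illustrative): folders of string builtins use this
   to materialize the character argument of e.g.

     char *p = __builtin_strchr (s, 'a');

   as a host char before scanning a constant string, bailing out for
   cross configurations where the target char width differs from the
   host's.  */
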
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}

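/* Illustrative note: this evaluates to PTRDIFF_MAX (2^63 - 1 when
   ptrdiff_t is 64 bits); the access checks in this file treat any
   larger size as an invalid object size.  */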