/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Set up an array of builtin_info_type and make sure each element's
   decl is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
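
/* For example (illustrative only), given the prefix checks above:

     is_builtin_name ("__builtin_memcpy")  => true
     is_builtin_name ("__atomic_load_n")   => true
     is_builtin_name ("memcpy")            => false

   The comparisons use the literal lengths 10, 7 and 9 of the prefixes
   "__builtin_", "__sync_" and "__atomic_" respectively.  */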
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
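
/* Illustrative example (not part of the interface): for an object known
   to live at a byte address of the form 8*k + 2, get_object_alignment_1
   sets *alignp to 64 bits and *bitposp to 16 bits, so get_object_alignment
   returns least_bit_hwi (16) == 16 bits, i.e. the conservative 2-byte
   alignment implied by the nonzero low bits.  */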
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
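
/* Example (illustrative): for a pointer P known to be 16-byte aligned,
   the expression P + 4 reaches the POINTER_PLUS_EXPR case above with
   align == 128 bits and bitpos == 32 bits, so get_pointer_alignment
   returns least_bit_hwi (32) == 32 bits, i.e. 4-byte alignment.  */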
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
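
/* Example (illustrative): with eltsize == 1,
   string_length ("ab\0cd", 1, 5) returns 2.  For a 2-byte wide string
   laid out in memory as the bytes 'a', 0, 0, 0, string_length (ptr, 2, 2)
   returns 1; the memcmp against "\0\0\0\0" compares exactly ELTSIZE
   bytes per element, so a wide element is zero only if all of its
   bytes are.  */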
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, unsigned eltsize)
{
  gcc_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  src = string_constant (src, &byteoff, &memsize);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* For empty strings the result should be zero.  */
      if (maxelts == 0)
	return ssize_int (0);

      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.  */
      if (len < strelts || len > maxelts)
	return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  return ssize_int (len);
}
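
/* Worked example (illustrative): applied to &"foobar"[2] with eltsize 1,
   BYTEOFF is the constant 2, so ELTOFF == 2, the scan starts at "obar"
   and ssize_int (4) is returned.  For "foo\0bar" with a non-constant
   offset, the embedded NUL means the length depends on where the search
   would start, so the len < strelts check above yields NULL_TREE.  */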
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
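
/* Example (illustrative): reading "abcd" in SImode on a little-endian
   target with 32-bit words yields the constant 0x64636261, while a
   big-endian target yields 0x61626364; the computation of J above is
   what places each string byte at the bit position the target's memory
   order requires.  */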
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
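
/* Usage sketch (illustrative): expanders for character-argument builtins
   typically do

     char c;
     if (target_char_cast (arg, &c))
       return NULL_RTX;	/* Not a constant that fits a host char; punt.  */

   i.e. a nonzero return means "fall back to the library call".  */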
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
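
/* Example (illustrative): __builtin_return_address (0) arrives here with
   FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and COUNT == 0, so no
   dynamic-chain walk happens and the return address is read relative to
   the current frame; __builtin_frame_address (1) instead walks one link
   of the dynamic chain before returning the frame address.  */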
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
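
/* Buffer layout implied by the stores above: word 0 of the setjmp buffer
   holds the frame pointer value, word 1 the address of the receiver
   label, and the bytes from offset 2 * GET_MODE_SIZE (Pmode) onward hold
   the machine-dependent stack save area.  */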
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded within
     the current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
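
/* Example (illustrative): the expanders below call this as, e.g.,

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   for a two-pointer builtin such as __builtin_nonlocal_goto, or as

     validate_arglist (exp, POINTER_TYPE, 0)

   when trailing arguments are optional, as for __builtin_prefetch.  */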
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
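
/* For reference (illustrative): a source-level call such as

     __builtin_prefetch (p, 1, 3);

   arrives here with arg1 == 1 (prefetch for write) and arg2 == 3
   (maximum temporal locality); both must be literal constants since they
   become immediate operands of the target's prefetch insn.  */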
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
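
/* Layout sketch (illustrative, target-dependent): on a hypothetical
   target with 4-byte Pmode, two SImode argument registers and one DFmode
   argument register with 8-byte alignment, the block computed above is

     offset  0: incoming arg pointer  (4 bytes)
     offset  4: SImode arg register   (4 bytes)
     offset  8: SImode arg register   (4 bytes)
     offset 16: DFmode arg register   (8 bytes, after padding to align 8)

   giving size == 24, with apply_args_mode[] recording the mode saved for
   each hard register.  */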
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the arg pointer as the caller actually passed the arguments
     to us, not as we might have pretended they were passed.  Make sure
     it's a valid operand, as emit_move_insn isn't expected to handle
     a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1768 /* Perform an untyped return. */
1770 static void
1771 expand_builtin_return (rtx result)
1773 int size, align, regno;
1774 fixed_size_mode mode;
1775 rtx reg;
1776 rtx_insn *call_fusage = 0;
1778 result = convert_memory_address (Pmode, result);
1780 apply_result_size ();
1781 result = gen_rtx_MEM (BLKmode, result);
1783 if (targetm.have_untyped_return ())
1785 rtx vector = result_vector (0, result);
1786 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1787 emit_barrier ();
1788 return;
1791 /* Restore the return value and note that each value is used. */
1792 size = 0;
1793 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1794 if ((mode = apply_result_mode[regno]) != VOIDmode)
1796 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1797 if (size % align != 0)
1798 size = CEIL (size, align) * align;
1799 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1800 emit_move_insn (reg, adjust_address (result, mode, size));
1802 push_to_sequence (call_fusage);
1803 emit_use (reg);
1804 call_fusage = get_insns ();
1805 end_sequence ();
1806 size += GET_MODE_SIZE (mode);
1809 /* Put the USE insns before the return. */
1810 emit_insn (call_fusage);
1812 /* Return whatever values were restored by jumping directly to the end
1813 of the function. */
1814 expand_naked_return ();
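/* Illustrative sketch (not part of GCC): the three builtins expanded
   above are typically used together to forward a call whose arguments
   are unknown at compile time, e.g.

     void *forward (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   Here target_fn and the 64-byte argument-block size are hypothetical;
   the size argument is the caller's upper bound on the number of bytes
   of stack arguments to copy.  */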
1817 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1819 static enum type_class
1820 type_to_class (tree type)
1822 switch (TREE_CODE (type))
1824 case VOID_TYPE: return void_type_class;
1825 case INTEGER_TYPE: return integer_type_class;
1826 case ENUMERAL_TYPE: return enumeral_type_class;
1827 case BOOLEAN_TYPE: return boolean_type_class;
1828 case POINTER_TYPE: return pointer_type_class;
1829 case REFERENCE_TYPE: return reference_type_class;
1830 case OFFSET_TYPE: return offset_type_class;
1831 case REAL_TYPE: return real_type_class;
1832 case COMPLEX_TYPE: return complex_type_class;
1833 case FUNCTION_TYPE: return function_type_class;
1834 case METHOD_TYPE: return method_type_class;
1835 case RECORD_TYPE: return record_type_class;
1836 case UNION_TYPE:
1837 case QUAL_UNION_TYPE: return union_type_class;
1838 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1839 ? string_type_class : array_type_class);
1840 case LANG_TYPE: return lang_type_class;
1841 default: return no_type_class;
1845 /* Expand a call EXP to __builtin_classify_type. */
1847 static rtx
1848 expand_builtin_classify_type (tree exp)
1850 if (call_expr_nargs (exp))
1851 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1852 return GEN_INT (no_type_class);
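/* For illustration (not part of GCC): because the expansion above maps
   straight to a constant, a call such as

     int c = __builtin_classify_type (3.14);

   compiles to a load of the constant real_type_class, while a pointer
   argument (or an array, which decays to a pointer in a C call) yields
   pointer_type_class.  */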
1855 /* This helper macro, meant to be used in mathfn_built_in below, determines
1856 which among a set of builtin math functions is appropriate for a given type
1857 mode. The `F' (float) and `L' (long double) are automatically generated
1858 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1859 types, additional variants with 'F32', 'F64', 'F128', etc. suffixes
1860 are considered as well. */
1861 #define CASE_MATHFN(MATHFN) \
1862 CASE_CFN_##MATHFN: \
1863 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1864 fcodel = BUILT_IN_##MATHFN##L ; break;
1865 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1866 types. */
1867 #define CASE_MATHFN_FLOATN(MATHFN) \
1868 CASE_CFN_##MATHFN: \
1869 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1870 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1871 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1872 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1873 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1874 break;
1875 /* Similar to above, but appends _R after any F/L suffix. */
1876 #define CASE_MATHFN_REENT(MATHFN) \
1877 case CFN_BUILT_IN_##MATHFN##_R: \
1878 case CFN_BUILT_IN_##MATHFN##F_R: \
1879 case CFN_BUILT_IN_##MATHFN##L_R: \
1880 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1881 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1883 /* Return a function equivalent to FN but operating on floating-point
1884 values of type TYPE, or END_BUILTINS if no such function exists.
1885 This is purely an operation on function codes; it does not guarantee
1886 that the target actually has an implementation of the function. */
1888 static built_in_function
1889 mathfn_built_in_2 (tree type, combined_fn fn)
1891 tree mtype;
1892 built_in_function fcode, fcodef, fcodel;
1893 built_in_function fcodef16 = END_BUILTINS;
1894 built_in_function fcodef32 = END_BUILTINS;
1895 built_in_function fcodef64 = END_BUILTINS;
1896 built_in_function fcodef128 = END_BUILTINS;
1897 built_in_function fcodef32x = END_BUILTINS;
1898 built_in_function fcodef64x = END_BUILTINS;
1899 built_in_function fcodef128x = END_BUILTINS;
1901 switch (fn)
1903 CASE_MATHFN (ACOS)
1904 CASE_MATHFN (ACOSH)
1905 CASE_MATHFN (ASIN)
1906 CASE_MATHFN (ASINH)
1907 CASE_MATHFN (ATAN)
1908 CASE_MATHFN (ATAN2)
1909 CASE_MATHFN (ATANH)
1910 CASE_MATHFN (CBRT)
1911 CASE_MATHFN_FLOATN (CEIL)
1912 CASE_MATHFN (CEXPI)
1913 CASE_MATHFN_FLOATN (COPYSIGN)
1914 CASE_MATHFN (COS)
1915 CASE_MATHFN (COSH)
1916 CASE_MATHFN (DREM)
1917 CASE_MATHFN (ERF)
1918 CASE_MATHFN (ERFC)
1919 CASE_MATHFN (EXP)
1920 CASE_MATHFN (EXP10)
1921 CASE_MATHFN (EXP2)
1922 CASE_MATHFN (EXPM1)
1923 CASE_MATHFN (FABS)
1924 CASE_MATHFN (FDIM)
1925 CASE_MATHFN_FLOATN (FLOOR)
1926 CASE_MATHFN_FLOATN (FMA)
1927 CASE_MATHFN_FLOATN (FMAX)
1928 CASE_MATHFN_FLOATN (FMIN)
1929 CASE_MATHFN (FMOD)
1930 CASE_MATHFN (FREXP)
1931 CASE_MATHFN (GAMMA)
1932 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1933 CASE_MATHFN (HUGE_VAL)
1934 CASE_MATHFN (HYPOT)
1935 CASE_MATHFN (ILOGB)
1936 CASE_MATHFN (ICEIL)
1937 CASE_MATHFN (IFLOOR)
1938 CASE_MATHFN (INF)
1939 CASE_MATHFN (IRINT)
1940 CASE_MATHFN (IROUND)
1941 CASE_MATHFN (ISINF)
1942 CASE_MATHFN (J0)
1943 CASE_MATHFN (J1)
1944 CASE_MATHFN (JN)
1945 CASE_MATHFN (LCEIL)
1946 CASE_MATHFN (LDEXP)
1947 CASE_MATHFN (LFLOOR)
1948 CASE_MATHFN (LGAMMA)
1949 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1950 CASE_MATHFN (LLCEIL)
1951 CASE_MATHFN (LLFLOOR)
1952 CASE_MATHFN (LLRINT)
1953 CASE_MATHFN (LLROUND)
1954 CASE_MATHFN (LOG)
1955 CASE_MATHFN (LOG10)
1956 CASE_MATHFN (LOG1P)
1957 CASE_MATHFN (LOG2)
1958 CASE_MATHFN (LOGB)
1959 CASE_MATHFN (LRINT)
1960 CASE_MATHFN (LROUND)
1961 CASE_MATHFN (MODF)
1962 CASE_MATHFN (NAN)
1963 CASE_MATHFN (NANS)
1964 CASE_MATHFN_FLOATN (NEARBYINT)
1965 CASE_MATHFN (NEXTAFTER)
1966 CASE_MATHFN (NEXTTOWARD)
1967 CASE_MATHFN (POW)
1968 CASE_MATHFN (POWI)
1969 CASE_MATHFN (POW10)
1970 CASE_MATHFN (REMAINDER)
1971 CASE_MATHFN (REMQUO)
1972 CASE_MATHFN_FLOATN (RINT)
1973 CASE_MATHFN_FLOATN (ROUND)
1974 CASE_MATHFN (SCALB)
1975 CASE_MATHFN (SCALBLN)
1976 CASE_MATHFN (SCALBN)
1977 CASE_MATHFN (SIGNBIT)
1978 CASE_MATHFN (SIGNIFICAND)
1979 CASE_MATHFN (SIN)
1980 CASE_MATHFN (SINCOS)
1981 CASE_MATHFN (SINH)
1982 CASE_MATHFN_FLOATN (SQRT)
1983 CASE_MATHFN (TAN)
1984 CASE_MATHFN (TANH)
1985 CASE_MATHFN (TGAMMA)
1986 CASE_MATHFN_FLOATN (TRUNC)
1987 CASE_MATHFN (Y0)
1988 CASE_MATHFN (Y1)
1989 CASE_MATHFN (YN)
1991 default:
1992 return END_BUILTINS;
1995 mtype = TYPE_MAIN_VARIANT (type);
1996 if (mtype == double_type_node)
1997 return fcode;
1998 else if (mtype == float_type_node)
1999 return fcodef;
2000 else if (mtype == long_double_type_node)
2001 return fcodel;
2002 else if (mtype == float16_type_node)
2003 return fcodef16;
2004 else if (mtype == float32_type_node)
2005 return fcodef32;
2006 else if (mtype == float64_type_node)
2007 return fcodef64;
2008 else if (mtype == float128_type_node)
2009 return fcodef128;
2010 else if (mtype == float32x_type_node)
2011 return fcodef32x;
2012 else if (mtype == float64x_type_node)
2013 return fcodef64x;
2014 else if (mtype == float128x_type_node)
2015 return fcodef128x;
2016 else
2017 return END_BUILTINS;
2020 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
2021 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2022 otherwise use the explicit declaration. If we can't do the conversion,
2023 return null. */
2025 static tree
2026 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2028 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2029 if (fcode2 == END_BUILTINS)
2030 return NULL_TREE;
2032 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2033 return NULL_TREE;
2035 return builtin_decl_explicit (fcode2);
2038 /* Like mathfn_built_in_1, but always use the implicit array. */
2040 tree
2041 mathfn_built_in (tree type, combined_fn fn)
2043 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2046 /* Like mathfn_built_in_1, but take a built_in_function and
2047 always use the implicit array. */
2049 tree
2050 mathfn_built_in (tree type, enum built_in_function fn)
2052 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
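/* A minimal usage sketch (illustrative only): the mapping above picks
   the suffixed variant matching TYPE, so

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   returns the implicit declaration of sqrtf, or NULL_TREE when no
   implicit sqrtf declaration is available.  */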
2055 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2056 return its code, otherwise return IFN_LAST. Note that this function
2057 only tests whether the function is defined in internal-fn.def, not whether
2058 it is actually available on the target. */
2060 internal_fn
2061 associated_internal_fn (tree fndecl)
2063 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2064 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2065 switch (DECL_FUNCTION_CODE (fndecl))
2067 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2068 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2069 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2070 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2071 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2072 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2073 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2074 #include "internal-fn.def"
2076 CASE_FLT_FN (BUILT_IN_POW10):
2077 return IFN_EXP10;
2079 CASE_FLT_FN (BUILT_IN_DREM):
2080 return IFN_REMAINDER;
2082 CASE_FLT_FN (BUILT_IN_SCALBN):
2083 CASE_FLT_FN (BUILT_IN_SCALBLN):
2084 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2085 return IFN_LDEXP;
2086 return IFN_LAST;
2088 default:
2089 return IFN_LAST;
2093 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2094 on the current target by a call to an internal function, return the
2095 code of that internal function, otherwise return IFN_LAST. The caller
2096 is responsible for ensuring that any side-effects of the built-in
2097 call are dealt with correctly. E.g. if CALL sets errno, the caller
2098 must decide that the errno result isn't needed or make it available
2099 in some other way. */
2101 internal_fn
2102 replacement_internal_fn (gcall *call)
2104 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2106 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2107 if (ifn != IFN_LAST)
2109 tree_pair types = direct_internal_fn_types (ifn, call);
2110 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2111 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2112 return ifn;
2115 return IFN_LAST;
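/* Illustrative sketch (not part of GCC): a caller that has already
   dealt with the errno side-effect might replace the call like so,
   assuming a single-argument builtin:

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         gcall *repl = gimple_build_call_internal (ifn, 1,
                                                   gimple_call_arg (call, 0));
         gimple_call_set_lhs (repl, gimple_call_lhs (call));
       }

   The single-argument form and the surrounding update of the statement
   stream are assumptions about the caller's context.  */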
2118 /* Expand a call to the builtin ternary math functions (fma).
2119 Return NULL_RTX if a normal call should be emitted rather than expanding the
2120 function in-line. EXP is the expression that is a call to the builtin
2121 function; if convenient, the result should be placed in TARGET.
2122 SUBTARGET may be used as the target for computing one of EXP's
2123 operands. */
2125 static rtx
2126 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2128 optab builtin_optab;
2129 rtx op0, op1, op2, result;
2130 rtx_insn *insns;
2131 tree fndecl = get_callee_fndecl (exp);
2132 tree arg0, arg1, arg2;
2133 machine_mode mode;
2135 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2136 return NULL_RTX;
2138 arg0 = CALL_EXPR_ARG (exp, 0);
2139 arg1 = CALL_EXPR_ARG (exp, 1);
2140 arg2 = CALL_EXPR_ARG (exp, 2);
2142 switch (DECL_FUNCTION_CODE (fndecl))
2144 CASE_FLT_FN (BUILT_IN_FMA):
2145 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2146 builtin_optab = fma_optab; break;
2147 default:
2148 gcc_unreachable ();
2151 /* Make a suitable register to place result in. */
2152 mode = TYPE_MODE (TREE_TYPE (exp));
2154 /* Before working hard, check whether the instruction is available. */
2155 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2156 return NULL_RTX;
2158 result = gen_reg_rtx (mode);
2160 /* Always stabilize the argument list. */
2161 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2162 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2163 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2165 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2166 op1 = expand_normal (arg1);
2167 op2 = expand_normal (arg2);
2169 start_sequence ();
2171 /* Compute into RESULT.
2172 Set RESULT to wherever the result comes back. */
2173 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2174 result, 0);
2176 /* If we were unable to expand via the builtin, stop the sequence
2177 (without outputting the insns) and call to the library function
2178 with the stabilized argument list. */
2179 if (result == 0)
2181 end_sequence ();
2182 return expand_call (exp, target, target == const0_rtx);
2185 /* Output the entire sequence. */
2186 insns = get_insns ();
2187 end_sequence ();
2188 emit_insn (insns);
2190 return result;
2193 /* Expand a call to the builtin sin and cos math functions.
2194 Return NULL_RTX if a normal call should be emitted rather than expanding the
2195 function in-line. EXP is the expression that is a call to the builtin
2196 function; if convenient, the result should be placed in TARGET.
2197 SUBTARGET may be used as the target for computing one of EXP's
2198 operands. */
2200 static rtx
2201 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2203 optab builtin_optab;
2204 rtx op0;
2205 rtx_insn *insns;
2206 tree fndecl = get_callee_fndecl (exp);
2207 machine_mode mode;
2208 tree arg;
2210 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2211 return NULL_RTX;
2213 arg = CALL_EXPR_ARG (exp, 0);
2215 switch (DECL_FUNCTION_CODE (fndecl))
2217 CASE_FLT_FN (BUILT_IN_SIN):
2218 CASE_FLT_FN (BUILT_IN_COS):
2219 builtin_optab = sincos_optab; break;
2220 default:
2221 gcc_unreachable ();
2224 /* Make a suitable register to place result in. */
2225 mode = TYPE_MODE (TREE_TYPE (exp));
2227 /* Check if sincos insn is available, otherwise fallback
2228 to sin or cos insn. */
2229 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2230 switch (DECL_FUNCTION_CODE (fndecl))
2232 CASE_FLT_FN (BUILT_IN_SIN):
2233 builtin_optab = sin_optab; break;
2234 CASE_FLT_FN (BUILT_IN_COS):
2235 builtin_optab = cos_optab; break;
2236 default:
2237 gcc_unreachable ();
2240 /* Before working hard, check whether the instruction is available. */
2241 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2243 rtx result = gen_reg_rtx (mode);
2245 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2246 need to expand the argument again. This way, we will not perform
2247 side-effects more than once. */
2248 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2250 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2252 start_sequence ();
2254 /* Compute into RESULT.
2255 Set RESULT to wherever the result comes back. */
2256 if (builtin_optab == sincos_optab)
2258 int ok;
2260 switch (DECL_FUNCTION_CODE (fndecl))
2262 CASE_FLT_FN (BUILT_IN_SIN):
2263 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2264 break;
2265 CASE_FLT_FN (BUILT_IN_COS):
2266 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2267 break;
2268 default:
2269 gcc_unreachable ();
2271 gcc_assert (ok);
2273 else
2274 result = expand_unop (mode, builtin_optab, op0, result, 0);
2276 if (result != 0)
2278 /* Output the entire sequence. */
2279 insns = get_insns ();
2280 end_sequence ();
2281 emit_insn (insns);
2282 return result;
2285 /* If we were unable to expand via the builtin, stop the sequence
2286 (without outputting the insns) and call to the library function
2287 with the stabilized argument list. */
2288 end_sequence ();
2291 return expand_call (exp, target, target == const0_rtx);
2294 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2295 return an RTL instruction code that implements the functionality.
2296 If that isn't possible or available return CODE_FOR_nothing. */
2298 static enum insn_code
2299 interclass_mathfn_icode (tree arg, tree fndecl)
2301 bool errno_set = false;
2302 optab builtin_optab = unknown_optab;
2303 machine_mode mode;
2305 switch (DECL_FUNCTION_CODE (fndecl))
2307 CASE_FLT_FN (BUILT_IN_ILOGB):
2308 errno_set = true; builtin_optab = ilogb_optab; break;
2309 CASE_FLT_FN (BUILT_IN_ISINF):
2310 builtin_optab = isinf_optab; break;
2311 case BUILT_IN_ISNORMAL:
2312 case BUILT_IN_ISFINITE:
2313 CASE_FLT_FN (BUILT_IN_FINITE):
2314 case BUILT_IN_FINITED32:
2315 case BUILT_IN_FINITED64:
2316 case BUILT_IN_FINITED128:
2317 case BUILT_IN_ISINFD32:
2318 case BUILT_IN_ISINFD64:
2319 case BUILT_IN_ISINFD128:
2320 /* These builtins have no optabs (yet). */
2321 break;
2322 default:
2323 gcc_unreachable ();
2326 /* There's no easy way to detect the case we need to set EDOM. */
2327 if (flag_errno_math && errno_set)
2328 return CODE_FOR_nothing;
2330 /* Optab mode depends on the mode of the input argument. */
2331 mode = TYPE_MODE (TREE_TYPE (arg));
2333 if (builtin_optab)
2334 return optab_handler (builtin_optab, mode);
2335 return CODE_FOR_nothing;
2338 /* Expand a call to one of the builtin math functions that operate on
2339 a floating-point argument and output an integer result (ilogb, isinf,
2340 isnan, etc.).
2341 Return 0 if a normal call should be emitted rather than expanding the
2342 function in-line. EXP is the expression that is a call to the builtin
2343 function; if convenient, the result should be placed in TARGET. */
2345 static rtx
2346 expand_builtin_interclass_mathfn (tree exp, rtx target)
2348 enum insn_code icode = CODE_FOR_nothing;
2349 rtx op0;
2350 tree fndecl = get_callee_fndecl (exp);
2351 machine_mode mode;
2352 tree arg;
2354 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2355 return NULL_RTX;
2357 arg = CALL_EXPR_ARG (exp, 0);
2358 icode = interclass_mathfn_icode (arg, fndecl);
2359 mode = TYPE_MODE (TREE_TYPE (arg));
2361 if (icode != CODE_FOR_nothing)
2363 struct expand_operand ops[1];
2364 rtx_insn *last = get_last_insn ();
2365 tree orig_arg = arg;
2367 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2368 need to expand the argument again. This way, we will not perform
2369 side-effects more than once. */
2370 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2372 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2374 if (mode != GET_MODE (op0))
2375 op0 = convert_to_mode (mode, op0, 0);
2377 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2378 if (maybe_legitimize_operands (icode, 0, 1, ops)
2379 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2380 return ops[0].value;
2382 delete_insns_since (last);
2383 CALL_EXPR_ARG (exp, 0) = orig_arg;
2386 return NULL_RTX;
2389 /* Expand a call to the builtin sincos math function.
2390 Return NULL_RTX if a normal call should be emitted rather than expanding the
2391 function in-line. EXP is the expression that is a call to the builtin
2392 function. */
2394 static rtx
2395 expand_builtin_sincos (tree exp)
2397 rtx op0, op1, op2, target1, target2;
2398 machine_mode mode;
2399 tree arg, sinp, cosp;
2400 int result;
2401 location_t loc = EXPR_LOCATION (exp);
2402 tree alias_type, alias_off;
2404 if (!validate_arglist (exp, REAL_TYPE,
2405 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2406 return NULL_RTX;
2408 arg = CALL_EXPR_ARG (exp, 0);
2409 sinp = CALL_EXPR_ARG (exp, 1);
2410 cosp = CALL_EXPR_ARG (exp, 2);
2412 /* Make a suitable register to place result in. */
2413 mode = TYPE_MODE (TREE_TYPE (arg));
2415 /* Check if sincos insn is available, otherwise emit the call. */
2416 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2417 return NULL_RTX;
2419 target1 = gen_reg_rtx (mode);
2420 target2 = gen_reg_rtx (mode);
2422 op0 = expand_normal (arg);
2423 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2424 alias_off = build_int_cst (alias_type, 0);
2425 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2426 sinp, alias_off));
2427 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2428 cosp, alias_off));
2430 /* Compute into target1 and target2.
2431 Set TARGET to wherever the result comes back. */
2432 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2433 gcc_assert (result);
2435 /* Move target1 and target2 to the memory locations indicated
2436 by op1 and op2. */
2437 emit_move_insn (op1, target1);
2438 emit_move_insn (op2, target2);
2440 return const0_rtx;
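/* For illustration (not part of GCC): on a target that implements the
   sincos optab, a source-level call such as

     double s, c;
     sincos (x, &s, &c);

   expands to a single two-output instruction rather than separate sin
   and cos libcalls; otherwise the expander above bails out and a
   normal library call is emitted.  */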
2443 /* Expand a call to the internal cexpi builtin to the sincos math function.
2444 EXP is the expression that is a call to the builtin function; if convenient,
2445 the result should be placed in TARGET. */
2447 static rtx
2448 expand_builtin_cexpi (tree exp, rtx target)
2450 tree fndecl = get_callee_fndecl (exp);
2451 tree arg, type;
2452 machine_mode mode;
2453 rtx op0, op1, op2;
2454 location_t loc = EXPR_LOCATION (exp);
2456 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2457 return NULL_RTX;
2459 arg = CALL_EXPR_ARG (exp, 0);
2460 type = TREE_TYPE (arg);
2461 mode = TYPE_MODE (TREE_TYPE (arg));
2463 /* Try expanding via a sincos optab, falling back to emitting a libcall
2464 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2465 is only generated from sincos or cexp, or when either of them is available. */
2466 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2468 op1 = gen_reg_rtx (mode);
2469 op2 = gen_reg_rtx (mode);
2471 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2473 /* Compute into op1 and op2. */
2474 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2476 else if (targetm.libc_has_function (function_sincos))
2478 tree call, fn = NULL_TREE;
2479 tree top1, top2;
2480 rtx op1a, op2a;
2482 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2483 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2485 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2487 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2488 else
2489 gcc_unreachable ();
2491 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2492 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2493 op1a = copy_addr_to_reg (XEXP (op1, 0));
2494 op2a = copy_addr_to_reg (XEXP (op2, 0));
2495 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2496 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2498 /* Make sure not to fold the sincos call again. */
2499 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2500 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2501 call, 3, arg, top1, top2));
2503 else
2505 tree call, fn = NULL_TREE, narg;
2506 tree ctype = build_complex_type (type);
2508 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2509 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2510 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2511 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2512 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2513 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2514 else
2515 gcc_unreachable ();
2517 /* If we don't have a decl for cexp create one. This is the
2518 friendliest fallback if the user calls __builtin_cexpi
2519 without full target C99 function support. */
2520 if (fn == NULL_TREE)
2522 tree fntype;
2523 const char *name = NULL;
2525 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2526 name = "cexpf";
2527 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2528 name = "cexp";
2529 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2530 name = "cexpl";
2532 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2533 fn = build_fn_decl (name, fntype);
2536 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2537 build_real (type, dconst0), arg);
2539 /* Make sure not to fold the cexp call again. */
2540 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2541 return expand_expr (build_call_nary (ctype, call, 1, narg),
2542 target, VOIDmode, EXPAND_NORMAL);
2545 /* Now build the proper return type. */
2546 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2547 make_tree (TREE_TYPE (arg), op2),
2548 make_tree (TREE_TYPE (arg), op1)),
2549 target, VOIDmode, EXPAND_NORMAL);
2552 /* Conveniently construct a function call expression. FNDECL names the
2553 function to be called, N is the number of arguments, and the "..."
2554 parameters are the argument expressions. Unlike build_call_expr
2555 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2557 static tree
2558 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2560 va_list ap;
2561 tree fntype = TREE_TYPE (fndecl);
2562 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2564 va_start (ap, n);
2565 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2566 va_end (ap);
2567 SET_EXPR_LOCATION (fn, loc);
2568 return fn;
2571 /* Expand a call to one of the builtin rounding functions gcc defines
2572 as an extension (lfloor and lceil). As these are gcc extensions we
2573 do not need to worry about setting errno to EDOM.
2574 If expanding via optab fails, lower expression to (int)(floor(x)).
2575 EXP is the expression that is a call to the builtin function;
2576 if convenient, the result should be placed in TARGET. */
2578 static rtx
2579 expand_builtin_int_roundingfn (tree exp, rtx target)
2581 convert_optab builtin_optab;
2582 rtx op0, tmp;
2583 rtx_insn *insns;
2584 tree fndecl = get_callee_fndecl (exp);
2585 enum built_in_function fallback_fn;
2586 tree fallback_fndecl;
2587 machine_mode mode;
2588 tree arg;
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 gcc_unreachable ();
2593 arg = CALL_EXPR_ARG (exp, 0);
2595 switch (DECL_FUNCTION_CODE (fndecl))
2597 CASE_FLT_FN (BUILT_IN_ICEIL):
2598 CASE_FLT_FN (BUILT_IN_LCEIL):
2599 CASE_FLT_FN (BUILT_IN_LLCEIL):
2600 builtin_optab = lceil_optab;
2601 fallback_fn = BUILT_IN_CEIL;
2602 break;
2604 CASE_FLT_FN (BUILT_IN_IFLOOR):
2605 CASE_FLT_FN (BUILT_IN_LFLOOR):
2606 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2607 builtin_optab = lfloor_optab;
2608 fallback_fn = BUILT_IN_FLOOR;
2609 break;
2611 default:
2612 gcc_unreachable ();
2615 /* Make a suitable register to place result in. */
2616 mode = TYPE_MODE (TREE_TYPE (exp));
2618 target = gen_reg_rtx (mode);
2620 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2621 need to expand the argument again. This way, we will not perform
2622 side-effects more than once. */
2623 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2625 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2627 start_sequence ();
2629 /* Compute into TARGET. */
2630 if (expand_sfix_optab (target, op0, builtin_optab))
2632 /* Output the entire sequence. */
2633 insns = get_insns ();
2634 end_sequence ();
2635 emit_insn (insns);
2636 return target;
2639 /* If we were unable to expand via the builtin, stop the sequence
2640 (without outputting the insns). */
2641 end_sequence ();
2643 /* Fall back to floating point rounding optab. */
2644 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2646 /* For non-C99 targets we may end up without a fallback fndecl here
2647 if the user called __builtin_lfloor directly. In this case emit
2648 a call to the floor/ceil variants nevertheless. This should result
2649 in the best user experience on targets without full C99 support. */
2650 if (fallback_fndecl == NULL_TREE)
2652 tree fntype;
2653 const char *name = NULL;
2655 switch (DECL_FUNCTION_CODE (fndecl))
2657 case BUILT_IN_ICEIL:
2658 case BUILT_IN_LCEIL:
2659 case BUILT_IN_LLCEIL:
2660 name = "ceil";
2661 break;
2662 case BUILT_IN_ICEILF:
2663 case BUILT_IN_LCEILF:
2664 case BUILT_IN_LLCEILF:
2665 name = "ceilf";
2666 break;
2667 case BUILT_IN_ICEILL:
2668 case BUILT_IN_LCEILL:
2669 case BUILT_IN_LLCEILL:
2670 name = "ceill";
2671 break;
2672 case BUILT_IN_IFLOOR:
2673 case BUILT_IN_LFLOOR:
2674 case BUILT_IN_LLFLOOR:
2675 name = "floor";
2676 break;
2677 case BUILT_IN_IFLOORF:
2678 case BUILT_IN_LFLOORF:
2679 case BUILT_IN_LLFLOORF:
2680 name = "floorf";
2681 break;
2682 case BUILT_IN_IFLOORL:
2683 case BUILT_IN_LFLOORL:
2684 case BUILT_IN_LLFLOORL:
2685 name = "floorl";
2686 break;
2687 default:
2688 gcc_unreachable ();
2691 fntype = build_function_type_list (TREE_TYPE (arg),
2692 TREE_TYPE (arg), NULL_TREE);
2693 fallback_fndecl = build_fn_decl (name, fntype);
2696 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2698 tmp = expand_normal (exp);
2699 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2701 /* Truncate the result of the floating-point optab to an integer
2702 via expand_fix (). */
2703 target = gen_reg_rtx (mode);
2704 expand_fix (target, tmp, 0);
2706 return target;
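/* Illustrative sketch (not part of GCC): when the lceil/lfloor optab
   is unavailable, the fallback path above lowers

     long l = __builtin_lfloor (x);

   into the equivalent of

     long l = (long) floor (x);

   i.e. a call to the floor variant followed by an expand_fix
   truncation to the integer mode.  */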
2709 /* Expand a call to one of the builtin math functions doing integer
2710 conversion (lrint).
2711 Return 0 if a normal call should be emitted rather than expanding the
2712 function in-line. EXP is the expression that is a call to the builtin
2713 function; if convenient, the result should be placed in TARGET. */
2715 static rtx
2716 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2718 convert_optab builtin_optab;
2719 rtx op0;
2720 rtx_insn *insns;
2721 tree fndecl = get_callee_fndecl (exp);
2722 tree arg;
2723 machine_mode mode;
2724 enum built_in_function fallback_fn = BUILT_IN_NONE;
2726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2727 gcc_unreachable ();
2729 arg = CALL_EXPR_ARG (exp, 0);
2731 switch (DECL_FUNCTION_CODE (fndecl))
2733 CASE_FLT_FN (BUILT_IN_IRINT):
2734 fallback_fn = BUILT_IN_LRINT;
2735 gcc_fallthrough ();
2736 CASE_FLT_FN (BUILT_IN_LRINT):
2737 CASE_FLT_FN (BUILT_IN_LLRINT):
2738 builtin_optab = lrint_optab;
2739 break;
2741 CASE_FLT_FN (BUILT_IN_IROUND):
2742 fallback_fn = BUILT_IN_LROUND;
2743 gcc_fallthrough ();
2744 CASE_FLT_FN (BUILT_IN_LROUND):
2745 CASE_FLT_FN (BUILT_IN_LLROUND):
2746 builtin_optab = lround_optab;
2747 break;
2749 default:
2750 gcc_unreachable ();
2753 /* There's no easy way to detect the case we need to set EDOM. */
2754 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2755 return NULL_RTX;
2757 /* Make a suitable register to place result in. */
2758 mode = TYPE_MODE (TREE_TYPE (exp));
2760 /* There's no easy way to detect the case we need to set EDOM. */
2761 if (!flag_errno_math)
2763 rtx result = gen_reg_rtx (mode);
2765 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2766 need to expand the argument again. This way, we will not perform
2767 side-effects more than once. */
2768 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2770 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2772 start_sequence ();
2774 if (expand_sfix_optab (result, op0, builtin_optab))
2776 /* Output the entire sequence. */
2777 insns = get_insns ();
2778 end_sequence ();
2779 emit_insn (insns);
2780 return result;
2783 /* If we were unable to expand via the builtin, stop the sequence
2784 (without outputting the insns) and call to the library function
2785 with the stabilized argument list. */
2786 end_sequence ();
2789 if (fallback_fn != BUILT_IN_NONE)
2791 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2792 targets, (int) round (x) should never be transformed into
2793 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2794 a call to lround in the hope that the target provides at least some
2795 C99 functions. This should result in the best user experience on
2796 targets without full C99 support. */
2797 tree fallback_fndecl = mathfn_built_in_1
2798 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2800 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2801 fallback_fndecl, 1, arg);
2803 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2804 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2805 return convert_to_mode (mode, target, 0);
2808 return expand_call (exp, target, target == const0_rtx);
2811 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2812 a normal call should be emitted rather than expanding the function
2813 in-line. EXP is the expression that is a call to the builtin
2814 function; if convenient, the result should be placed in TARGET. */
2816 static rtx
2817 expand_builtin_powi (tree exp, rtx target)
2819 tree arg0, arg1;
2820 rtx op0, op1;
2821 machine_mode mode;
2822 machine_mode mode2;
2824 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2825 return NULL_RTX;
2827 arg0 = CALL_EXPR_ARG (exp, 0);
2828 arg1 = CALL_EXPR_ARG (exp, 1);
2829 mode = TYPE_MODE (TREE_TYPE (exp));
2831 /* Emit a libcall to libgcc. */
2833 /* Mode of the 2nd argument must match that of an int. */
2834 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2836 if (target == NULL_RTX)
2837 target = gen_reg_rtx (mode);
2839 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2840 if (GET_MODE (op0) != mode)
2841 op0 = convert_to_mode (mode, op0, 0);
2842 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2843 if (GET_MODE (op1) != mode2)
2844 op1 = convert_to_mode (mode2, op1, 0);
2846 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2847 target, LCT_CONST, mode,
2848 op0, mode, op1, mode2);
2850 return target;
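/* For illustration (not part of GCC): with a double argument the code
   above emits a libcall, so

     double r = __builtin_powi (x, n);

   becomes roughly r = __powidf2 (x, n); the exact libfunc name is
   whatever optab_libfunc returns for powi_optab in DFmode.  */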
2853 /* Expand expression EXP which is a call to the strlen builtin. Return
2854 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2855 try to get the result in TARGET, if convenient. */
2857 static rtx
2858 expand_builtin_strlen (tree exp, rtx target,
2859 machine_mode target_mode)
2861 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2862 return NULL_RTX;
2864 struct expand_operand ops[4];
2865 rtx pat;
2866 tree len;
2867 tree src = CALL_EXPR_ARG (exp, 0);
2868 rtx src_reg;
2869 rtx_insn *before_strlen;
2870 machine_mode insn_mode;
2871 enum insn_code icode = CODE_FOR_nothing;
2872 unsigned int align;
2874 /* If the length can be computed at compile-time, return it. */
2875 len = c_strlen (src, 0);
2876 if (len)
2877 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2879 /* If the length can be computed at compile-time and is a constant
2880 integer, but there are side-effects in src, evaluate
2881 src for side-effects, then return len.
2882 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2883 can be optimized into: i++; x = 3; */
2884 len = c_strlen (src, 1);
2885 if (len && TREE_CODE (len) == INTEGER_CST)
2887 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2888 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2891 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2893 /* If SRC is not a pointer type, don't do this operation inline. */
2894 if (align == 0)
2895 return NULL_RTX;
2897 /* Bail out if we can't compute strlen in the right mode. */
2898 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2900 icode = optab_handler (strlen_optab, insn_mode);
2901 if (icode != CODE_FOR_nothing)
2902 break;
2904 if (insn_mode == VOIDmode)
2905 return NULL_RTX;
2907 /* Make a place to hold the source address. We will not expand
2908 the actual source until we are sure that the expansion will
2909 not fail -- there are trees that cannot be expanded twice. */
2910 src_reg = gen_reg_rtx (Pmode);
2912 /* Mark the beginning of the strlen sequence so we can emit the
2913 source operand later. */
2914 before_strlen = get_last_insn ();
2916 create_output_operand (&ops[0], target, insn_mode);
2917 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2918 create_integer_operand (&ops[2], 0);
2919 create_integer_operand (&ops[3], align);
2920 if (!maybe_expand_insn (icode, 4, ops))
2921 return NULL_RTX;
2923 /* Check to see if the argument was declared attribute nonstring
2924 and if so, issue a warning since at this point it's not known
2925 to be nul-terminated. */
2926 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2928 /* Now that we are assured of success, expand the source. */
2929 start_sequence ();
2930 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2931 if (pat != src_reg)
2933 #ifdef POINTERS_EXTEND_UNSIGNED
2934 if (GET_MODE (pat) != Pmode)
2935 pat = convert_to_mode (Pmode, pat,
2936 POINTERS_EXTEND_UNSIGNED);
2937 #endif
2938 emit_move_insn (src_reg, pat);
2940 pat = get_insns ();
2941 end_sequence ();
2943 if (before_strlen)
2944 emit_insn_after (pat, before_strlen);
2945 else
2946 emit_insn_before (pat, get_insns ());
2948 /* Return the value in the proper mode for this function. */
2949 if (GET_MODE (ops[0].value) == target_mode)
2950 target = ops[0].value;
2951 else if (target != 0)
2952 convert_move (target, ops[0].value, 0);
2953 else
2954 target = convert_to_mode (target_mode, ops[0].value, 0);
2956 return target;
2959 /* Expand call EXP to the strnlen built-in, placing the result in
2960 TARGET if convenient. Return NULL_RTX on failure. */
2962 static rtx
2963 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2965 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2966 return NULL_RTX;
2968 tree src = CALL_EXPR_ARG (exp, 0);
2969 tree bound = CALL_EXPR_ARG (exp, 1);
2971 if (!bound)
2972 return NULL_RTX;
2974 location_t loc = UNKNOWN_LOCATION;
2975 if (EXPR_HAS_LOCATION (exp))
2976 loc = EXPR_LOCATION (exp);
2978 tree maxobjsize = max_object_size ();
2979 tree func = get_callee_fndecl (exp);
2981 tree len = c_strlen (src, 0);
2983 if (TREE_CODE (bound) == INTEGER_CST)
2985 if (!TREE_NO_WARNING (exp)
2986 && tree_int_cst_lt (maxobjsize, bound)
2987 && warning_at (loc, OPT_Wstringop_overflow_,
2988 "%K%qD specified bound %E "
2989 "exceeds maximum object size %E",
2990 exp, func, bound, maxobjsize))
2991 TREE_NO_WARNING (exp) = true;
2993 if (!len || TREE_CODE (len) != INTEGER_CST)
2994 return NULL_RTX;
2996 len = fold_convert_loc (loc, size_type_node, len);
2997 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2998 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3001 if (TREE_CODE (bound) != SSA_NAME)
3002 return NULL_RTX;
3004 wide_int min, max;
3005 enum value_range_type rng = get_range_info (bound, &min, &max);
3006 if (rng != VR_RANGE)
3007 return NULL_RTX;
3009 if (!TREE_NO_WARNING (exp)
3010 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3011 && warning_at (loc, OPT_Wstringop_overflow_,
3012 "%K%qD specified bound [%wu, %wu] "
3013 "exceeds maximum object size %E",
3014 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3015 TREE_NO_WARNING (exp) = true;
3017 if (!len || TREE_CODE (len) != INTEGER_CST)
3018 return NULL_RTX;
3020 if (wi::gtu_p (min, wi::to_wide (len)))
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3023 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3024 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3027 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3028 bytes from constant string DATA + OFFSET and return it as target
3029 constant. */
3031 static rtx
3032 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3033 scalar_int_mode mode)
3035 const char *str = (const char *) data;
3037 gcc_assert (offset >= 0
3038 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3039 <= strlen (str) + 1));
3041 return c_readstr (str + offset, mode);
3044 /* LEN specifies the length of the block for a memcpy/memset operation.
3045 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3046 In some cases we can make a very likely guess about the maximum size;
3047 we then store it in PROBABLE_MAX_SIZE. */
3049 static void
3050 determine_block_size (tree len, rtx len_rtx,
3051 unsigned HOST_WIDE_INT *min_size,
3052 unsigned HOST_WIDE_INT *max_size,
3053 unsigned HOST_WIDE_INT *probable_max_size)
3055 if (CONST_INT_P (len_rtx))
3057 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3058 return;
3060 else
3062 wide_int min, max;
3063 enum value_range_type range_type = VR_UNDEFINED;
3065 /* Determine bounds from the type. */
3066 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3067 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3068 else
3069 *min_size = 0;
3070 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3071 *probable_max_size = *max_size
3072 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3073 else
3074 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3076 if (TREE_CODE (len) == SSA_NAME)
3077 range_type = get_range_info (len, &min, &max);
3078 if (range_type == VR_RANGE)
3080 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3081 *min_size = min.to_uhwi ();
3082 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3083 *probable_max_size = *max_size = max.to_uhwi ();
3085 else if (range_type == VR_ANTI_RANGE)
3087 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3088 if (min == 0)
3090 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3091 *min_size = max.to_uhwi () + 1;
3093 /* Code like
3095 int n;
3096 if (n < 100)
3097 memcpy (a, b, n)
3099 produces an anti-range allowing negative values of N. We can
3100 still use this information and guess that N is not negative. */
3102 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3103 *probable_max_size = min.to_uhwi () - 1;
3106 gcc_checking_assert (*max_size <=
3107 (unsigned HOST_WIDE_INT)
3108 GET_MODE_MASK (GET_MODE (len_rtx)));
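/* A worked example (illustrative only): in

     void f (unsigned n, char *a, const char *b)
     {
       if (n < 100)
         memcpy (a, b, n);
     }

   range info gives N the range [0, 99], so the code above sets
   *MIN_SIZE to 0 and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 99,
   letting the expander choose a cheap inline copy strategy.  */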
3111 /* Try to verify that the sizes and lengths of the arguments to a string
3112 manipulation function given by EXP are within valid bounds and that
3113 the operation does not lead to buffer overflow or read past the end.
3114 Arguments other than EXP may be null. When non-null, the arguments
3115 have the following meaning:
3116 DST is the destination of a copy call or NULL otherwise.
3117 SRC is the source of a copy call or NULL otherwise.
3118 DSTWRITE is the number of bytes written into the destination obtained
3119 from the user-supplied size argument to the function (such as in
3120 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3121 MAXREAD is the user-supplied bound on the length of the source sequence
3122 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3123 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3124 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3125 expression EXP is a string function call (as opposed to a memory call
3126 like memcpy). As an exception, SRCSTR can also be an integer denoting
3127 the precomputed size of the source string or object (for functions like
3128 memcpy).
3129 DSTSIZE is the size of the destination object specified by the last
3130 argument to the _chk builtins, typically resulting from the expansion
3131 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3132 DSTSIZE)).
3134 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3135 SIZE_MAX.
3137 If the call is successfully verified as safe return true, otherwise
3138 return false. */
3140 static bool
3141 check_access (tree exp, tree, tree, tree dstwrite,
3142 tree maxread, tree srcstr, tree dstsize)
3144 int opt = OPT_Wstringop_overflow_;
3146 /* The size of the largest object is half the address space, or
3147 PTRDIFF_MAX. (This is way too permissive.) */
3148 tree maxobjsize = max_object_size ();
3150 /* Either the length of the source string for string functions or
3151 the size of the source object for raw memory functions. */
3152 tree slen = NULL_TREE;
3154 tree range[2] = { NULL_TREE, NULL_TREE };
3156 /* Set to true when the exact number of bytes written by a string
3157 function like strcpy is not known and the only thing that is
3158 known is that it must be at least one (for the terminating nul). */
3159 bool at_least_one = false;
3160 if (srcstr)
3162 /* SRCSTR is normally a pointer to string but as a special case
3163 it can be an integer denoting the length of a string. */
3164 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3166 /* Try to determine the range of lengths the source string
3167 refers to. If it can be determined and is less than
3168 the upper bound given by MAXREAD add one to it for
3169 the terminating nul. Otherwise, set it to one for
3170 the same reason, or to MAXREAD as appropriate. */
3171 get_range_strlen (srcstr, range);
3172 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3174 if (maxread && tree_int_cst_le (maxread, range[0]))
3175 range[0] = range[1] = maxread;
3176 else
3177 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3178 range[0], size_one_node);
3180 if (maxread && tree_int_cst_le (maxread, range[1]))
3181 range[1] = maxread;
3182 else if (!integer_all_onesp (range[1]))
3183 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3184 range[1], size_one_node);
3186 slen = range[0];
3188 else
3190 at_least_one = true;
3191 slen = size_one_node;
3194 else
3195 slen = srcstr;
3198 if (!dstwrite && !maxread)
3200 /* When the only available piece of data is the object size
3201 there is nothing to do. */
3202 if (!slen)
3203 return true;
3205 /* Otherwise, when the length of the source sequence is known
3206 (as with strlen), set DSTWRITE to it. */
3207 if (!range[0])
3208 dstwrite = slen;
3211 if (!dstsize)
3212 dstsize = maxobjsize;
3214 if (dstwrite)
3215 get_size_range (dstwrite, range);
3217 tree func = get_callee_fndecl (exp);
3219 /* First check the number of bytes to be written against the maximum
3220 object size. */
3221 if (range[0]
3222 && TREE_CODE (range[0]) == INTEGER_CST
3223 && tree_int_cst_lt (maxobjsize, range[0]))
3225 if (TREE_NO_WARNING (exp))
3226 return false;
3228 location_t loc = tree_nonartificial_location (exp);
3229 loc = expansion_point_location_if_in_system_header (loc);
3231 bool warned;
3232 if (range[0] == range[1])
3233 warned = warning_at (loc, opt,
3234 "%K%qD specified size %E "
3235 "exceeds maximum object size %E",
3236 exp, func, range[0], maxobjsize);
3237 else
3238 warned = warning_at (loc, opt,
3239 "%K%qD specified size between %E and %E "
3240 "exceeds maximum object size %E",
3241 exp, func,
3242 range[0], range[1], maxobjsize);
3243 if (warned)
3244 TREE_NO_WARNING (exp) = true;
3246 return false;
3249 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3250 constant, and in range of unsigned HOST_WIDE_INT. */
3251 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3253 /* Next check the number of bytes to be written against the destination
3254 object size. */
3255 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3257 if (range[0]
3258 && TREE_CODE (range[0]) == INTEGER_CST
3259 && ((tree_fits_uhwi_p (dstsize)
3260 && tree_int_cst_lt (dstsize, range[0]))
3261 || (dstwrite
3262 && tree_fits_uhwi_p (dstwrite)
3263 && tree_int_cst_lt (dstwrite, range[0]))))
3265 if (TREE_NO_WARNING (exp))
3266 return false;
3268 location_t loc = tree_nonartificial_location (exp);
3269 loc = expansion_point_location_if_in_system_header (loc);
3271 if (dstwrite == slen && at_least_one)
3273 /* This is a call to strcpy with a destination of 0 size
3274 and a source of unknown length. The call will write
3275 at least one byte past the end of the destination. */
3276 warning_at (loc, opt,
3277 "%K%qD writing %E or more bytes into a region "
3278 "of size %E overflows the destination",
3279 exp, func, range[0], dstsize);
3281 else if (tree_int_cst_equal (range[0], range[1]))
3282 warning_n (loc, opt, tree_to_uhwi (range[0]),
3283 "%K%qD writing %E byte into a region "
3284 "of size %E overflows the destination",
3285 "%K%qD writing %E bytes into a region "
3286 "of size %E overflows the destination",
3287 exp, func, range[0], dstsize);
3288 else if (tree_int_cst_sign_bit (range[1]))
3290 /* Avoid printing the upper bound if it's invalid. */
3291 warning_at (loc, opt,
3292 "%K%qD writing %E or more bytes into a region "
3293 "of size %E overflows the destination",
3294 exp, func, range[0], dstsize);
3296 else
3297 warning_at (loc, opt,
3298 "%K%qD writing between %E and %E bytes into "
3299 "a region of size %E overflows the destination",
3300 exp, func, range[0], range[1],
3301 dstsize);
3303 /* Return error when an overflow has been detected. */
3304 return false;
3308 /* Check the maximum length of the source sequence against the size
3309 of the destination object if known, or against the maximum size
3310 of an object. */
3311 if (maxread)
3313 get_size_range (maxread, range);
3315 /* Use the lower end for MAXREAD from now on. */
3316 if (range[0])
3317 maxread = range[0];
3319 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3321 location_t loc = tree_nonartificial_location (exp);
3322 loc = expansion_point_location_if_in_system_header (loc);
3324 if (tree_int_cst_lt (maxobjsize, range[0]))
3326 if (TREE_NO_WARNING (exp))
3327 return false;
3329 /* Warn about crazy big sizes first since that's more
3330 likely to be meaningful than saying that the bound
3331 is greater than the object size if both are big. */
3332 if (range[0] == range[1])
3333 warning_at (loc, opt,
3334 "%K%qD specified bound %E "
3335 "exceeds maximum object size %E",
3336 exp, func,
3337 range[0], maxobjsize);
3338 else
3339 warning_at (loc, opt,
3340 "%K%qD specified bound between %E and %E "
3341 "exceeds maximum object size %E",
3342 exp, func,
3343 range[0], range[1], maxobjsize);
3345 return false;
3348 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3350 if (TREE_NO_WARNING (exp))
3351 return false;
3353 if (tree_int_cst_equal (range[0], range[1]))
3354 warning_at (loc, opt,
3355 "%K%qD specified bound %E "
3356 "exceeds destination size %E",
3357 exp, func,
3358 range[0], dstsize);
3359 else
3360 warning_at (loc, opt,
3361 "%K%qD specified bound between %E and %E "
3362 "exceeds destination size %E",
3363 exp, func,
3364 range[0], range[1], dstsize);
3365 return false;
3370 /* Check for reading past the end of SRC. */
3371 if (slen
3372 && slen == srcstr
3373 && dstwrite && range[0]
3374 && tree_int_cst_lt (slen, range[0]))
3376 if (TREE_NO_WARNING (exp))
3377 return false;
3379 location_t loc = tree_nonartificial_location (exp);
3380 loc = expansion_point_location_if_in_system_header (loc);
3381 if (tree_int_cst_equal (range[0], range[1]))
3382 warning_n (loc, opt, tree_to_uhwi (range[0]),
3383 "%K%qD reading %E byte from a region of size %E",
3384 "%K%qD reading %E bytes from a region of size %E",
3385 exp, func, range[0], slen);
3386 else if (tree_int_cst_sign_bit (range[1]))
3388 /* Avoid printing the upper bound if it's invalid. */
3389 warning_at (loc, opt,
3390 "%K%qD reading %E or more bytes from a region "
3391 "of size %E",
3392 exp, func, range[0], slen);
3394 else
3395 warning_at (loc, opt,
3396 "%K%qD reading between %E and %E bytes from a region "
3397 "of size %E",
3398 exp, func, range[0], range[1], slen);
3399 return false;
3402 return true;
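/* For illustration (not part of GCC): the destination-size check above
   is what diagnoses, with -Wstringop-overflow enabled,

     char d[3];
     strcpy (d, "abcd");   (writes 5 bytes into a 3-byte buffer)

   because the source length plus the terminating nul exceeds
   DSTSIZE.  */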
3405 /* Helper to compute the size of the object referenced by the DEST
3406 expression which must have pointer type, using Object Size type
3407 OSTYPE (only the least significant 2 bits are used). Return
3408 an estimate of the size of the object if successful or NULL when
3409 the size cannot be determined. When the referenced object involves
3410 a non-constant offset in some range the returned value represents
3411 the largest size given the smallest non-negative offset in the
3412 range. The function is intended for diagnostics and should not
3413 be used to influence code generation or optimization. */
3415 tree
3416 compute_objsize (tree dest, int ostype)
3418 unsigned HOST_WIDE_INT size;
3420 /* Only the two least significant bits are meaningful. */
3421 ostype &= 3;
3423 if (compute_builtin_object_size (dest, ostype, &size))
3424 return build_int_cst (sizetype, size);
3426 if (TREE_CODE (dest) == SSA_NAME)
3428 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3429 if (!is_gimple_assign (stmt))
3430 return NULL_TREE;
3432 dest = gimple_assign_rhs1 (stmt);
3434 tree_code code = gimple_assign_rhs_code (stmt);
3435 if (code == POINTER_PLUS_EXPR)
3437 /* compute_builtin_object_size fails for addresses with
3438 non-constant offsets. Try to determine the range of
3439 such an offset here and use it to adjust the constant
3440 size. */
3441 tree off = gimple_assign_rhs2 (stmt);
3442 if (TREE_CODE (off) == INTEGER_CST)
3444 if (tree size = compute_objsize (dest, ostype))
3446 wide_int wioff = wi::to_wide (off);
3447 wide_int wisiz = wi::to_wide (size);
3449 /* Ignore negative offsets for now. For others,
3450 use the lower bound as the most optimistic
3451 estimate of the (remaining) size. */
3452 if (wi::sign_mask (wioff))
3454 else if (wi::ltu_p (wioff, wisiz))
3455 return wide_int_to_tree (TREE_TYPE (size),
3456 wi::sub (wisiz, wioff));
3457 else
3458 return size_zero_node;
3461 else if (TREE_CODE (off) == SSA_NAME
3462 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3464 wide_int min, max;
3465 enum value_range_type rng = get_range_info (off, &min, &max);
3467 if (rng == VR_RANGE)
3469 if (tree size = compute_objsize (dest, ostype))
3471 wide_int wisiz = wi::to_wide (size);
3473 /* Ignore negative offsets for now. For others,
3474 use the lower bound as the most optimistic
3475 estimate of the (remaining) size. */
3476 if (wi::sign_mask (min))
3478 else if (wi::ltu_p (min, wisiz))
3479 return wide_int_to_tree (TREE_TYPE (size),
3480 wi::sub (wisiz, min));
3481 else
3482 return size_zero_node;
3487 else if (code != ADDR_EXPR)
3488 return NULL_TREE;
3491 /* Unless computing the largest size (for memcpy and other raw memory
3492 functions), try to determine the size of the object from its type. */
3493 if (!ostype)
3494 return NULL_TREE;
3496 if (TREE_CODE (dest) != ADDR_EXPR)
3497 return NULL_TREE;
3499 tree type = TREE_TYPE (dest);
3500 if (TREE_CODE (type) == POINTER_TYPE)
3501 type = TREE_TYPE (type);
3503 type = TYPE_MAIN_VARIANT (type);
3505 if (TREE_CODE (type) == ARRAY_TYPE
3506 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3508 /* Return the constant size unless it's zero (that's a zero-length
3509 array likely at the end of a struct). */
3510 tree size = TYPE_SIZE_UNIT (type);
3511 if (size && TREE_CODE (size) == INTEGER_CST
3512 && !integer_zerop (size))
3513 return size;
3516 return NULL_TREE;
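The POINTER_PLUS_EXPR handling above boils down to one unsigned-arithmetic rule. A standalone sketch of that rule (the helper name is hypothetical, not part of this file):

    /* Mirror of the wide_int logic above: given the size of the whole
       object and a known non-negative offset into it, the remaining
       size is SIZE - OFF when OFF is in bounds, otherwise zero.  */
    static unsigned long
    remaining_size (unsigned long objsize, unsigned long off)
    {
      return off < objsize ? objsize - off : 0;
    }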
3519 /* Helper to determine and check the sizes of the source and the destination
3520 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3521 call expression, DEST is the destination argument, SRC is the source
3522 argument or null, and LEN is the number of bytes. Use Object Size type-0
3523 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3524 (no overflow or invalid sizes), false otherwise. */
3526 static bool
3527 check_memop_access (tree exp, tree dest, tree src, tree size)
3529 /* For functions like memset and memcpy that operate on raw memory
3530 try to determine the size of the largest source and destination
3531 object using type-0 Object Size regardless of the object size
3532 type specified by the option. */
3533 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3534 tree dstsize = compute_objsize (dest, 0);
3536 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3537 srcsize, dstsize);
3540 /* Validate memchr arguments without performing any expansion.
3541 Return NULL_RTX. */
3543 static rtx
3544 expand_builtin_memchr (tree exp, rtx)
3546 if (!validate_arglist (exp,
3547 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3548 return NULL_RTX;
3550 tree arg1 = CALL_EXPR_ARG (exp, 0);
3551 tree len = CALL_EXPR_ARG (exp, 2);
3553 /* Diagnose calls where the specified length exceeds the size
3554 of the object. */
3555 if (warn_stringop_overflow)
3557 tree size = compute_objsize (arg1, 0);
3558 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3559 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3562 return NULL_RTX;
3565 /* Expand a call EXP to the memcpy builtin.
3566 Return NULL_RTX if we failed; the caller should emit a normal call,
3567 otherwise try to get the result in TARGET, if convenient (and in
3568 mode MODE if that's convenient). */
3570 static rtx
3571 expand_builtin_memcpy (tree exp, rtx target)
3573 if (!validate_arglist (exp,
3574 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3575 return NULL_RTX;
3577 tree dest = CALL_EXPR_ARG (exp, 0);
3578 tree src = CALL_EXPR_ARG (exp, 1);
3579 tree len = CALL_EXPR_ARG (exp, 2);
3581 check_memop_access (exp, dest, src, len);
3583 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3584 /*endp=*/ 0);
3587 /* Check a call EXP to the memmove built-in for validity.
3588 Return NULL_RTX on both success and failure. */
3590 static rtx
3591 expand_builtin_memmove (tree exp, rtx)
3593 if (!validate_arglist (exp,
3594 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3595 return NULL_RTX;
3597 tree dest = CALL_EXPR_ARG (exp, 0);
3598 tree src = CALL_EXPR_ARG (exp, 1);
3599 tree len = CALL_EXPR_ARG (exp, 2);
3601 check_memop_access (exp, dest, src, len);
3603 return NULL_RTX;
3606 /* Expand a call EXP to the mempcpy builtin.
3607 Return NULL_RTX if we failed; the caller should emit a normal call,
3608 otherwise try to get the result in TARGET, if convenient (and in
3609 mode MODE if that's convenient). If ENDP is 0 return the
3610 destination pointer, if ENDP is 1 return the end pointer ala
3611 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3612 stpcpy. */
3614 static rtx
3615 expand_builtin_mempcpy (tree exp, rtx target)
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3621 tree dest = CALL_EXPR_ARG (exp, 0);
3622 tree src = CALL_EXPR_ARG (exp, 1);
3623 tree len = CALL_EXPR_ARG (exp, 2);
3625 /* Policy does not generally allow using compute_objsize (which
3626 is used internally by check_memop_access) to change code generation
3627 or drive optimization decisions.
3629 In this instance it is safe because the code we generate has
3630 the same semantics regardless of the return value of
3631 check_memop_access. Exactly the same amount of data is copied
3632 and the return value is exactly the same in both cases.
3634 Furthermore, check_memop_access always uses mode 0 for the call to
3635 compute_objsize, so the imprecise nature of compute_objsize is
3636 avoided. */
3638 /* Avoid expanding mempcpy into memcpy when the call is determined
3639 to overflow the buffer. This also prevents the same overflow
3640 from being diagnosed again when expanding memcpy. */
3641 if (!check_memop_access (exp, dest, src, len))
3642 return NULL_RTX;
3644 return expand_builtin_mempcpy_args (dest, src, len,
3645 target, exp, /*endp=*/ 1);
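The ENDP encoding used throughout these expanders matches the return-value conventions of the library functions themselves. A user-level illustration (assuming dst is large enough and src is nul-terminated):

    #define _GNU_SOURCE
    #include <string.h>

    void
    copy_returns (char *dst, const char *src, size_t n)
    {
      char *p = memcpy (dst, src, n);    /* ENDP == 0: p == dst */
      char *q = mempcpy (dst, src, n);   /* ENDP == 1: q == dst + n */
      char *r = stpcpy (dst, src);       /* ENDP == 2: r == dst + strlen (src),
                                            the end pointer minus one */
      (void) p; (void) q; (void) r;
    }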
3648 /* Helper function to do the actual work for expand of memory copy family
3649 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3650 of memory from SRC to DEST and assign to TARGET if convenient.
3651 If ENDP is 0 return the
3652 destination pointer, if ENDP is 1 return the end pointer ala
3653 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3654 stpcpy. */
3656 static rtx
3657 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3658 rtx target, tree exp, int endp)
3660 const char *src_str;
3661 unsigned int src_align = get_pointer_alignment (src);
3662 unsigned int dest_align = get_pointer_alignment (dest);
3663 rtx dest_mem, src_mem, dest_addr, len_rtx;
3664 HOST_WIDE_INT expected_size = -1;
3665 unsigned int expected_align = 0;
3666 unsigned HOST_WIDE_INT min_size;
3667 unsigned HOST_WIDE_INT max_size;
3668 unsigned HOST_WIDE_INT probable_max_size;
3670 /* If DEST is not a pointer type, call the normal function. */
3671 if (dest_align == 0)
3672 return NULL_RTX;
3674 /* Likewise, if SRC is not a pointer type, don't do this
3675 operation in-line. */
3676 if (src_align == 0)
3677 return NULL_RTX;
3679 if (currently_expanding_gimple_stmt)
3680 stringop_block_profile (currently_expanding_gimple_stmt,
3681 &expected_align, &expected_size);
3683 if (expected_align < dest_align)
3684 expected_align = dest_align;
3685 dest_mem = get_memory_rtx (dest, len);
3686 set_mem_align (dest_mem, dest_align);
3687 len_rtx = expand_normal (len);
3688 determine_block_size (len, len_rtx, &min_size, &max_size,
3689 &probable_max_size);
3690 src_str = c_getstr (src);
3692 /* If SRC is a string constant and block move would be done
3693 by pieces, we can avoid loading the string from memory
3694 and store only the computed constants. */
3695 if (src_str
3696 && CONST_INT_P (len_rtx)
3697 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3698 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3699 CONST_CAST (char *, src_str),
3700 dest_align, false))
3702 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3703 builtin_memcpy_read_str,
3704 CONST_CAST (char *, src_str),
3705 dest_align, false, endp);
3706 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3707 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3708 return dest_mem;
3711 src_mem = get_memory_rtx (src, len);
3712 set_mem_align (src_mem, src_align);
3714 /* Copy word part most expediently. */
3715 enum block_op_methods method = BLOCK_OP_NORMAL;
3716 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3717 method = BLOCK_OP_TAILCALL;
3718 if (endp == 1 && target != const0_rtx)
3719 method = BLOCK_OP_NO_LIBCALL_RET;
3720 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3721 expected_align, expected_size,
3722 min_size, max_size, probable_max_size);
3723 if (dest_addr == pc_rtx)
3724 return NULL_RTX;
3726 if (dest_addr == 0)
3728 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3729 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3732 if (endp && target != const0_rtx)
3734 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3735 /* stpcpy returns a pointer to the last byte (the terminating NUL). */
3736 if (endp == 2)
3737 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3740 return dest_addr;
3743 static rtx
3744 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3745 rtx target, tree orig_exp, int endp)
3747 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3748 endp);
3751 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3752 we failed; the caller should emit a normal call, otherwise try to
3753 get the result in TARGET, if convenient. If ENDP is 0 return the
3754 destination pointer, if ENDP is 1 return the end pointer ala
3755 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3756 stpcpy. */
3758 static rtx
3759 expand_movstr (tree dest, tree src, rtx target, int endp)
3761 struct expand_operand ops[3];
3762 rtx dest_mem;
3763 rtx src_mem;
3765 if (!targetm.have_movstr ())
3766 return NULL_RTX;
3768 dest_mem = get_memory_rtx (dest, NULL);
3769 src_mem = get_memory_rtx (src, NULL);
3770 if (!endp)
3772 target = force_reg (Pmode, XEXP (dest_mem, 0));
3773 dest_mem = replace_equiv_address (dest_mem, target);
3776 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3777 create_fixed_operand (&ops[1], dest_mem);
3778 create_fixed_operand (&ops[2], src_mem);
3779 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3780 return NULL_RTX;
3782 if (endp && target != const0_rtx)
3784 target = ops[0].value;
3785 /* movstr is supposed to set end to the address of the NUL
3786 terminator. If the caller requested a mempcpy-like return value,
3787 adjust it. */
3788 if (endp == 1)
3790 rtx tem = plus_constant (GET_MODE (target),
3791 gen_lowpart (GET_MODE (target), target), 1);
3792 emit_move_insn (target, force_operand (tem, NULL_RTX));
3795 return target;
3798 /* Do some very basic size validation of a call to the strcat builtin
3799 given by EXP. Return NULL_RTX to have the built-in expand to a call
3800 to the library function. */
3802 static rtx
3803 expand_builtin_strcat (tree exp, rtx)
3805 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3806 || !warn_stringop_overflow)
3807 return NULL_RTX;
3809 tree dest = CALL_EXPR_ARG (exp, 0);
3810 tree src = CALL_EXPR_ARG (exp, 1);
3812 /* There is no way here to determine the length of the string in
3813 the destination to which the SRC string is being appended so
3814 just diagnose cases when the source string is longer than
3815 the destination object. */
3817 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3819 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3820 destsize);
3822 return NULL_RTX;
3825 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3826 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3827 try to get the result in TARGET, if convenient (and in mode MODE if that's
3828 convenient). */
3830 static rtx
3831 expand_builtin_strcpy (tree exp, rtx target)
3833 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3834 return NULL_RTX;
3836 tree dest = CALL_EXPR_ARG (exp, 0);
3837 tree src = CALL_EXPR_ARG (exp, 1);
3839 if (warn_stringop_overflow)
3841 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3842 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3843 src, destsize);
3846 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3848 /* Check to see if the argument was declared attribute nonstring
3849 and if so, issue a warning since at this point it's not known
3850 to be nul-terminated. */
3851 tree fndecl = get_callee_fndecl (exp);
3852 maybe_warn_nonstring_arg (fndecl, exp);
3853 return ret;
3856 return NULL_RTX;
3859 /* Helper function to do the actual work for expand_builtin_strcpy. The
3860 arguments to the builtin_strcpy call DEST and SRC are broken out
3861 so that this can also be called without constructing an actual CALL_EXPR.
3862 The other arguments and return value are the same as for
3863 expand_builtin_strcpy. */
3865 static rtx
3866 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3868 return expand_movstr (dest, src, target, /*endp=*/0);
3871 /* Expand a call EXP to the stpcpy builtin.
3872 Return NULL_RTX if we failed; the caller should emit a normal call,
3873 otherwise try to get the result in TARGET, if convenient (and in
3874 mode MODE if that's convenient). */
3876 static rtx
3877 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3879 tree dst, src;
3880 location_t loc = EXPR_LOCATION (exp);
3882 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3883 return NULL_RTX;
3885 dst = CALL_EXPR_ARG (exp, 0);
3886 src = CALL_EXPR_ARG (exp, 1);
3888 if (warn_stringop_overflow)
3890 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3891 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3892 src, destsize);
3895 /* If return value is ignored, transform stpcpy into strcpy. */
3896 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3898 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3899 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3900 return expand_expr (result, target, mode, EXPAND_NORMAL);
3902 else
3904 tree len, lenp1;
3905 rtx ret;
3907 /* Ensure we get an actual string whose length can be evaluated at
3908 compile-time, not an expression containing a string. This is
3909 because the latter will potentially produce pessimized code
3910 when used to produce the return value. */
3911 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3912 return expand_movstr (dst, src, target, /*endp=*/2);
3914 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3915 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3916 target, exp, /*endp=*/2);
3918 if (ret)
3919 return ret;
3921 if (TREE_CODE (len) == INTEGER_CST)
3923 rtx len_rtx = expand_normal (len);
3925 if (CONST_INT_P (len_rtx))
3927 ret = expand_builtin_strcpy_args (dst, src, target);
3929 if (ret)
3931 if (! target)
3933 if (mode != VOIDmode)
3934 target = gen_reg_rtx (mode);
3935 else
3936 target = gen_reg_rtx (GET_MODE (ret));
3938 if (GET_MODE (target) != GET_MODE (ret))
3939 ret = gen_lowpart (GET_MODE (target), ret);
3941 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3942 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3943 gcc_assert (ret);
3945 return target;
3950 return expand_movstr (dst, src, target, /*endp=*/2);
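The two transformations above correspond to source-level identities of stpcpy, sketched here as a hedged illustration rather than code from this file:

    #define _GNU_SOURCE
    #include <string.h>

    void
    stpcpy_identities (char *dst, const char *src)
    {
      /* Result unused: stpcpy may be expanded as strcpy.  */
      stpcpy (dst, src);

      /* With a compile-time strlen, stpcpy (dst, src) is equivalent to
         (char *) mempcpy (dst, src, strlen (src) + 1) - 1.  */
      char *end = stpcpy (dst, src);
      (void) end;
    }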
3954 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3955 arguments while being careful to avoid duplicate warnings (which could
3956 be issued if the expander were to expand the call, resulting in it
3957 being emitted in expand_call ()). */
3959 static rtx
3960 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3962 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3964 /* The call has been successfully expanded. Check for nonstring
3965 arguments and issue warnings as appropriate. */
3966 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3967 return ret;
3970 return NULL_RTX;
3973 /* Check a call EXP to the stpncpy built-in for validity.
3974 Return NULL_RTX on both success and failure. */
3976 static rtx
3977 expand_builtin_stpncpy (tree exp, rtx)
3979 if (!validate_arglist (exp,
3980 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3981 || !warn_stringop_overflow)
3982 return NULL_RTX;
3984 /* The source and destination of the call. */
3985 tree dest = CALL_EXPR_ARG (exp, 0);
3986 tree src = CALL_EXPR_ARG (exp, 1);
3988 /* The exact number of bytes to write (not the maximum). */
3989 tree len = CALL_EXPR_ARG (exp, 2);
3991 /* The size of the destination object. */
3992 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3994 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3996 return NULL_RTX;
3999 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4000 bytes from constant string DATA + OFFSET and return it as target
4001 constant. */
4003 static rtx
4004 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4005 scalar_int_mode mode)
4007 const char *str = (const char *) data;
4009 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4010 return const0_rtx;
4012 return c_readstr (str + offset, mode);
4015 /* Helper to check the sizes of sequences and the destination of calls
4016 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4017 success (no overflow or invalid sizes), false otherwise. */
4019 static bool
4020 check_strncat_sizes (tree exp, tree objsize)
4022 tree dest = CALL_EXPR_ARG (exp, 0);
4023 tree src = CALL_EXPR_ARG (exp, 1);
4024 tree maxread = CALL_EXPR_ARG (exp, 2);
4026 /* Try to determine the range of lengths that the source expression
4027 refers to. */
4028 tree lenrange[2];
4029 get_range_strlen (src, lenrange);
4031 /* Try to verify that the destination is big enough for the shortest
4032 string. */
4034 if (!objsize && warn_stringop_overflow)
4036 /* If it hasn't been provided by __strncat_chk, try to determine
4037 the size of the destination object into which the source is
4038 being copied. */
4039 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4042 /* Add one for the terminating nul. */
4043 tree srclen = (lenrange[0]
4044 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4045 size_one_node)
4046 : NULL_TREE);
4048 /* The strncat function copies at most MAXREAD bytes and always appends
4049 the terminating nul so the specified upper bound should never be equal
4050 to (or greater than) the size of the destination. */
4051 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4052 && tree_int_cst_equal (objsize, maxread))
4054 location_t loc = tree_nonartificial_location (exp);
4055 loc = expansion_point_location_if_in_system_header (loc);
4057 warning_at (loc, OPT_Wstringop_overflow_,
4058 "%K%qD specified bound %E equals destination size",
4059 exp, get_callee_fndecl (exp), maxread);
4061 return false;
4064 if (!srclen
4065 || (maxread && tree_fits_uhwi_p (maxread)
4066 && tree_fits_uhwi_p (srclen)
4067 && tree_int_cst_lt (maxread, srclen)))
4068 srclen = maxread;
4070 /* The number of bytes to write is MAXREAD but check_access will also
4071 check SRCLEN if MAXREAD's value isn't known. */
4072 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4073 objsize);
4076 /* Similar to expand_builtin_strcat, do some very basic size validation
4077 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4078 the built-in expand to a call to the library function. */
4080 static rtx
4081 expand_builtin_strncat (tree exp, rtx)
4083 if (!validate_arglist (exp,
4084 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4085 || !warn_stringop_overflow)
4086 return NULL_RTX;
4088 tree dest = CALL_EXPR_ARG (exp, 0);
4089 tree src = CALL_EXPR_ARG (exp, 1);
4090 /* The upper bound on the number of bytes to write. */
4091 tree maxread = CALL_EXPR_ARG (exp, 2);
4092 /* The length of the source sequence. */
4093 tree slen = c_strlen (src, 1);
4095 /* Try to determine the range of lengths that the source expression
4096 refers to. */
4097 tree lenrange[2];
4098 if (slen)
4099 lenrange[0] = lenrange[1] = slen;
4100 else
4101 get_range_strlen (src, lenrange);
4103 /* Try to verify that the destination is big enough for the shortest
4104 string. First try to determine the size of the destination object
4105 into which the source is being copied. */
4106 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4108 /* Add one for the terminating nul. */
4109 tree srclen = (lenrange[0]
4110 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4111 size_one_node)
4112 : NULL_TREE);
4114 /* The strncat function copies at most MAXREAD bytes and always appends
4115 the terminating nul so the specified upper bound should never be equal
4116 to (or greater than) the size of the destination. */
4117 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4118 && tree_int_cst_equal (destsize, maxread))
4120 location_t loc = tree_nonartificial_location (exp);
4121 loc = expansion_point_location_if_in_system_header (loc);
4123 warning_at (loc, OPT_Wstringop_overflow_,
4124 "%K%qD specified bound %E equals destination size",
4125 exp, get_callee_fndecl (exp), maxread);
4127 return NULL_RTX;
4130 if (!srclen
4131 || (maxread && tree_fits_uhwi_p (maxread)
4132 && tree_fits_uhwi_p (srclen)
4133 && tree_int_cst_lt (maxread, srclen)))
4134 srclen = maxread;
4136 /* The number of bytes to write is SRCLEN. */
4137 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4139 return NULL_RTX;
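The "specified bound equals destination size" warning reflects a common strncat misuse: the bound limits only the bytes read from SRC, and the terminating nul is always appended, so the bound must leave room for both the existing contents of the destination and the nul. For example:

    #include <string.h>

    char buf[16];

    void
    append (const char *s)
    {
      /* Wrong: the bound equals sizeof buf, diagnosed above.  */
      strncat (buf, s, sizeof buf);

      /* Right: account for the current length and the nul.  */
      strncat (buf, s, sizeof buf - strlen (buf) - 1);
    }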
4142 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4143 NULL_RTX if we failed; the caller should emit a normal call. */
4145 static rtx
4146 expand_builtin_strncpy (tree exp, rtx target)
4148 location_t loc = EXPR_LOCATION (exp);
4150 if (validate_arglist (exp,
4151 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4153 tree dest = CALL_EXPR_ARG (exp, 0);
4154 tree src = CALL_EXPR_ARG (exp, 1);
4155 /* The number of bytes to write (not the maximum). */
4156 tree len = CALL_EXPR_ARG (exp, 2);
4157 /* The length of the source sequence. */
4158 tree slen = c_strlen (src, 1);
4160 if (warn_stringop_overflow)
4162 tree destsize = compute_objsize (dest,
4163 warn_stringop_overflow - 1);
4165 /* The number of bytes to write is LEN but check_access will also
4166 check SLEN if LEN's value isn't known. */
4167 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4168 destsize);
4171 /* We must be passed constant LEN and SRC parameters. */
4172 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4173 return NULL_RTX;
4175 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4177 /* We're required to pad with trailing zeros if the requested
4178 len is greater than strlen(s2)+1. In that case try to
4179 use store_by_pieces, if it fails, punt. */
4180 if (tree_int_cst_lt (slen, len))
4182 unsigned int dest_align = get_pointer_alignment (dest);
4183 const char *p = c_getstr (src);
4184 rtx dest_mem;
4186 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4187 || !can_store_by_pieces (tree_to_uhwi (len),
4188 builtin_strncpy_read_str,
4189 CONST_CAST (char *, p),
4190 dest_align, false))
4191 return NULL_RTX;
4193 dest_mem = get_memory_rtx (dest, len);
4194 store_by_pieces (dest_mem, tree_to_uhwi (len),
4195 builtin_strncpy_read_str,
4196 CONST_CAST (char *, p), dest_align, false, 0);
4197 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4198 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4199 return dest_mem;
4202 return NULL_RTX;
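The store_by_pieces path above exists because strncpy must zero-pad when the requested length exceeds strlen (src) + 1; the expander therefore emits the full constant pattern, padding included. At the source level:

    #include <string.h>

    void
    pad_example (void)
    {
      char d[8];
      /* LEN (8) exceeds strlen ("hi") + 1 (3), so the six bytes after
         "hi" must be zero-filled; the expander stores the whole 8-byte
         pattern by pieces.  */
      strncpy (d, "hi", sizeof d);
      (void) d;
    }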
4205 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4206 bytes from constant string DATA + OFFSET and return it as target
4207 constant. */
4209 static rtx
4210 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4211 scalar_int_mode mode)
4213 const char *c = (const char *) data;
4214 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4216 memset (p, *c, GET_MODE_SIZE (mode));
4218 return c_readstr (p, mode);
4221 /* Callback routine for store_by_pieces. Return the RTL of a register
4222 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4223 char value given in the RTL register data. For example, if mode is
4224 4 bytes wide, return the RTL for 0x01010101*data. */
4226 static rtx
4227 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4228 scalar_int_mode mode)
4230 rtx target, coeff;
4231 size_t size;
4232 char *p;
4234 size = GET_MODE_SIZE (mode);
4235 if (size == 1)
4236 return (rtx) data;
4238 p = XALLOCAVEC (char, size);
4239 memset (p, 1, size);
4240 coeff = c_readstr (p, mode);
4242 target = convert_to_mode (mode, (rtx) data, 1);
4243 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4244 return force_reg (mode, target);
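The multiplication by a 0x01...01 coefficient above is the usual byte-replication trick: every byte of the coefficient is 1, so multiplying by the zero-extended fill byte deposits a copy of it in each byte lane. A 32-bit sketch (the helper name is hypothetical):

    #include <stdint.h>

    /* Replicate byte C across a 32-bit word, as builtin_memset_gen_str
       arranges in the target mode.  */
    static uint32_t
    repeat_byte (uint8_t c)
    {
      return (uint32_t) c * UINT32_C (0x01010101);  /* e.g. 0xab -> 0xabababab */
    }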
4247 /* Expand expression EXP, which is a call to the memset builtin. Return
4248 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4249 try to get the result in TARGET, if convenient (and in mode MODE if that's
4250 convenient). */
4252 static rtx
4253 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4255 if (!validate_arglist (exp,
4256 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX;
4259 tree dest = CALL_EXPR_ARG (exp, 0);
4260 tree val = CALL_EXPR_ARG (exp, 1);
4261 tree len = CALL_EXPR_ARG (exp, 2);
4263 check_memop_access (exp, dest, NULL_TREE, len);
4265 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4268 /* Helper function to do the actual work for expand_builtin_memset. The
4269 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4270 so that this can also be called without constructing an actual CALL_EXPR.
4271 The other arguments and return value are the same as for
4272 expand_builtin_memset. */
4274 static rtx
4275 expand_builtin_memset_args (tree dest, tree val, tree len,
4276 rtx target, machine_mode mode, tree orig_exp)
4278 tree fndecl, fn;
4279 enum built_in_function fcode;
4280 machine_mode val_mode;
4281 char c;
4282 unsigned int dest_align;
4283 rtx dest_mem, dest_addr, len_rtx;
4284 HOST_WIDE_INT expected_size = -1;
4285 unsigned int expected_align = 0;
4286 unsigned HOST_WIDE_INT min_size;
4287 unsigned HOST_WIDE_INT max_size;
4288 unsigned HOST_WIDE_INT probable_max_size;
4290 dest_align = get_pointer_alignment (dest);
4292 /* If DEST is not a pointer type, don't do this operation in-line. */
4293 if (dest_align == 0)
4294 return NULL_RTX;
4296 if (currently_expanding_gimple_stmt)
4297 stringop_block_profile (currently_expanding_gimple_stmt,
4298 &expected_align, &expected_size);
4300 if (expected_align < dest_align)
4301 expected_align = dest_align;
4303 /* If the LEN parameter is zero, return DEST. */
4304 if (integer_zerop (len))
4306 /* Evaluate and ignore VAL in case it has side-effects. */
4307 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4308 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4311 /* Stabilize the arguments in case we fail. */
4312 dest = builtin_save_expr (dest);
4313 val = builtin_save_expr (val);
4314 len = builtin_save_expr (len);
4316 len_rtx = expand_normal (len);
4317 determine_block_size (len, len_rtx, &min_size, &max_size,
4318 &probable_max_size);
4319 dest_mem = get_memory_rtx (dest, len);
4320 val_mode = TYPE_MODE (unsigned_char_type_node);
4322 if (TREE_CODE (val) != INTEGER_CST)
4324 rtx val_rtx;
4326 val_rtx = expand_normal (val);
4327 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4329 /* Assume that we can memset by pieces if we can store
4330 the coefficients by pieces (in the required modes).
4331 We can't pass builtin_memset_gen_str as that emits RTL. */
4332 c = 1;
4333 if (tree_fits_uhwi_p (len)
4334 && can_store_by_pieces (tree_to_uhwi (len),
4335 builtin_memset_read_str, &c, dest_align,
4336 true))
4338 val_rtx = force_reg (val_mode, val_rtx);
4339 store_by_pieces (dest_mem, tree_to_uhwi (len),
4340 builtin_memset_gen_str, val_rtx, dest_align,
4341 true, 0);
4343 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4344 dest_align, expected_align,
4345 expected_size, min_size, max_size,
4346 probable_max_size))
4347 goto do_libcall;
4349 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4350 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4351 return dest_mem;
4354 if (target_char_cast (val, &c))
4355 goto do_libcall;
4357 if (c)
4359 if (tree_fits_uhwi_p (len)
4360 && can_store_by_pieces (tree_to_uhwi (len),
4361 builtin_memset_read_str, &c, dest_align,
4362 true))
4363 store_by_pieces (dest_mem, tree_to_uhwi (len),
4364 builtin_memset_read_str, &c, dest_align, true, 0);
4365 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4366 gen_int_mode (c, val_mode),
4367 dest_align, expected_align,
4368 expected_size, min_size, max_size,
4369 probable_max_size))
4370 goto do_libcall;
4372 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4373 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4374 return dest_mem;
4377 set_mem_align (dest_mem, dest_align);
4378 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4379 CALL_EXPR_TAILCALL (orig_exp)
4380 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4381 expected_align, expected_size,
4382 min_size, max_size,
4383 probable_max_size);
4385 if (dest_addr == 0)
4387 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4388 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4391 return dest_addr;
4393 do_libcall:
4394 fndecl = get_callee_fndecl (orig_exp);
4395 fcode = DECL_FUNCTION_CODE (fndecl);
4396 if (fcode == BUILT_IN_MEMSET)
4397 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4398 dest, val, len);
4399 else if (fcode == BUILT_IN_BZERO)
4400 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4401 dest, len);
4402 else
4403 gcc_unreachable ();
4404 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4405 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4406 return expand_call (fn, target, target == const0_rtx);
4409 /* Expand expression EXP, which is a call to the bzero builtin. Return
4410 NULL_RTX if we failed; the caller should emit a normal call. */
4412 static rtx
4413 expand_builtin_bzero (tree exp)
4415 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4416 return NULL_RTX;
4418 tree dest = CALL_EXPR_ARG (exp, 0);
4419 tree size = CALL_EXPR_ARG (exp, 1);
4421 check_memop_access (exp, dest, NULL_TREE, size);
4423 /* New argument list transforming bzero(ptr x, int y) to
4424 memset(ptr x, int 0, size_t y). This is done this way
4425 so that if it isn't expanded inline, we fall back to
4426 calling bzero instead of memset. */
4428 location_t loc = EXPR_LOCATION (exp);
4430 return expand_builtin_memset_args (dest, integer_zero_node,
4431 fold_convert_loc (loc,
4432 size_type_node, size),
4433 const0_rtx, VOIDmode, exp);
4436 /* Try to expand cmpstr operation ICODE with the given operands.
4437 Return the result rtx on success, otherwise return null. */
4439 static rtx
4440 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4441 HOST_WIDE_INT align)
4443 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4445 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4446 target = NULL_RTX;
4448 struct expand_operand ops[4];
4449 create_output_operand (&ops[0], target, insn_mode);
4450 create_fixed_operand (&ops[1], arg1_rtx);
4451 create_fixed_operand (&ops[2], arg2_rtx);
4452 create_integer_operand (&ops[3], align);
4453 if (maybe_expand_insn (icode, 4, ops))
4454 return ops[0].value;
4455 return NULL_RTX;
4458 /* Expand expression EXP, which is a call to the memcmp built-in function.
4459 Return NULL_RTX if we failed and the caller should emit a normal call,
4460 otherwise try to get the result in TARGET, if convenient.
4461 RESULT_EQ is true if we can relax the returned value to be either zero
4462 or nonzero, without caring about the sign. */
4464 static rtx
4465 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4467 if (!validate_arglist (exp,
4468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4469 return NULL_RTX;
4471 tree arg1 = CALL_EXPR_ARG (exp, 0);
4472 tree arg2 = CALL_EXPR_ARG (exp, 1);
4473 tree len = CALL_EXPR_ARG (exp, 2);
4474 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4475 bool no_overflow = true;
4477 /* Diagnose calls where the specified length exceeds the size of either
4478 object. */
4479 tree size = compute_objsize (arg1, 0);
4480 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4481 len, /*maxread=*/NULL_TREE, size,
4482 /*objsize=*/NULL_TREE);
4483 if (no_overflow)
4485 size = compute_objsize (arg2, 0);
4486 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4487 len, /*maxread=*/NULL_TREE, size,
4488 /*objsize=*/NULL_TREE);
4491 /* If the specified length exceeds the size of either object,
4492 call the function. */
4493 if (!no_overflow)
4494 return NULL_RTX;
4496 /* Due to the performance benefit, always try the inline expansion first
4497 when result_eq is false. */
4498 rtx result = NULL_RTX;
4500 if (!result_eq && fcode != BUILT_IN_BCMP)
4502 result = inline_expand_builtin_string_cmp (exp, target);
4503 if (result)
4504 return result;
4507 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4508 location_t loc = EXPR_LOCATION (exp);
4510 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4511 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4513 /* If we don't have POINTER_TYPE, call the function. */
4514 if (arg1_align == 0 || arg2_align == 0)
4515 return NULL_RTX;
4517 rtx arg1_rtx = get_memory_rtx (arg1, len);
4518 rtx arg2_rtx = get_memory_rtx (arg2, len);
4519 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4521 /* Set MEM_SIZE as appropriate. */
4522 if (CONST_INT_P (len_rtx))
4524 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4525 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4528 by_pieces_constfn constfn = NULL;
4530 const char *src_str = c_getstr (arg2);
4531 if (result_eq && src_str == NULL)
4533 src_str = c_getstr (arg1);
4534 if (src_str != NULL)
4535 std::swap (arg1_rtx, arg2_rtx);
4538 /* If SRC is a string constant and block move would be done
4539 by pieces, we can avoid loading the string from memory
4540 and store only the computed constants. */
4541 if (src_str
4542 && CONST_INT_P (len_rtx)
4543 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4544 constfn = builtin_memcpy_read_str;
4546 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4547 TREE_TYPE (len), target,
4548 result_eq, constfn,
4549 CONST_CAST (char *, src_str));
4551 if (result)
4553 /* Return the value in the proper mode for this function. */
4554 if (GET_MODE (result) == mode)
4555 return result;
4557 if (target != 0)
4559 convert_move (target, result, 0);
4560 return target;
4563 return convert_to_mode (mode, result, 0);
4566 return NULL_RTX;
4569 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4570 if we failed; the caller should emit a normal call, otherwise try to get
4571 the result in TARGET, if convenient. */
4573 static rtx
4574 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4577 return NULL_RTX;
4579 /* Due to the performance benefit, always try the inline expansion first. */
4580 rtx result = NULL_RTX;
4581 result = inline_expand_builtin_string_cmp (exp, target);
4582 if (result)
4583 return result;
4585 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4586 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4587 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4588 return NULL_RTX;
4590 tree arg1 = CALL_EXPR_ARG (exp, 0);
4591 tree arg2 = CALL_EXPR_ARG (exp, 1);
4593 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4594 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4596 /* If we don't have POINTER_TYPE, call the function. */
4597 if (arg1_align == 0 || arg2_align == 0)
4598 return NULL_RTX;
4600 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4601 arg1 = builtin_save_expr (arg1);
4602 arg2 = builtin_save_expr (arg2);
4604 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4605 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4607 /* Try to call cmpstrsi. */
4608 if (cmpstr_icode != CODE_FOR_nothing)
4609 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4610 MIN (arg1_align, arg2_align));
4612 /* Try to determine at least one length and call cmpstrnsi. */
4613 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4615 tree len;
4616 rtx arg3_rtx;
4618 tree len1 = c_strlen (arg1, 1);
4619 tree len2 = c_strlen (arg2, 1);
4621 if (len1)
4622 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4623 if (len2)
4624 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4626 /* If we don't have a constant length for the first, use the length
4627 of the second, if we know it. We don't require a constant for
4628 this case; some cost analysis could be done if both are available
4629 but neither is constant. For now, assume they're equally cheap,
4630 unless one has side effects. If both strings have constant lengths,
4631 use the smaller. */
4633 if (!len1)
4634 len = len2;
4635 else if (!len2)
4636 len = len1;
4637 else if (TREE_SIDE_EFFECTS (len1))
4638 len = len2;
4639 else if (TREE_SIDE_EFFECTS (len2))
4640 len = len1;
4641 else if (TREE_CODE (len1) != INTEGER_CST)
4642 len = len2;
4643 else if (TREE_CODE (len2) != INTEGER_CST)
4644 len = len1;
4645 else if (tree_int_cst_lt (len1, len2))
4646 len = len1;
4647 else
4648 len = len2;
4650 /* If both arguments have side effects, we cannot optimize. */
4651 if (len && !TREE_SIDE_EFFECTS (len))
4653 arg3_rtx = expand_normal (len);
4654 result = expand_cmpstrn_or_cmpmem
4655 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4656 arg3_rtx, MIN (arg1_align, arg2_align));
4660 tree fndecl = get_callee_fndecl (exp);
4661 if (result)
4663 /* Check to see if the argument was declared attribute nonstring
4664 and if so, issue a warning since at this point it's not known
4665 to be nul-terminated. */
4666 maybe_warn_nonstring_arg (fndecl, exp);
4668 /* Return the value in the proper mode for this function. */
4669 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4670 if (GET_MODE (result) == mode)
4671 return result;
4672 if (target == 0)
4673 return convert_to_mode (mode, result, 0);
4674 convert_move (target, result, 0);
4675 return target;
4678 /* Expand the library call ourselves using a stabilized argument
4679 list to avoid re-evaluating the function's arguments twice. */
4680 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4681 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4682 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4683 return expand_call (fn, target, target == const0_rtx);
4686 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4687 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4688 the result in TARGET, if convenient. */
4690 static rtx
4691 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4692 ATTRIBUTE_UNUSED machine_mode mode)
4694 if (!validate_arglist (exp,
4695 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4696 return NULL_RTX;
4698 /* Due to the performance benefit, always try the inline expansion first. */
4699 rtx result = NULL_RTX;
4700 result = inline_expand_builtin_string_cmp (exp, target);
4701 if (result)
4702 return result;
4704 /* If c_strlen can determine an expression for one of the string
4705 lengths, and it doesn't have side effects, then emit cmpstrnsi
4706 using length MIN(strlen(string)+1, arg3). */
4707 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4708 if (cmpstrn_icode == CODE_FOR_nothing)
4709 return NULL_RTX;
4711 tree len;
4713 tree arg1 = CALL_EXPR_ARG (exp, 0);
4714 tree arg2 = CALL_EXPR_ARG (exp, 1);
4715 tree arg3 = CALL_EXPR_ARG (exp, 2);
4717 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4718 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4720 tree len1 = c_strlen (arg1, 1);
4721 tree len2 = c_strlen (arg2, 1);
4723 location_t loc = EXPR_LOCATION (exp);
4725 if (len1)
4726 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4727 if (len2)
4728 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4730 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4732 /* If we don't have a constant length for the first, use the length
4733 of the second, if we know it. If neither string is constant length,
4734 use the given length argument. We don't require a constant for
4735 this case; some cost analysis could be done if both are available
4736 but neither is constant. For now, assume they're equally cheap,
4737 unless one has side effects. If both strings have constant lengths,
4738 use the smaller. */
4740 if (!len1 && !len2)
4741 len = len3;
4742 else if (!len1)
4743 len = len2;
4744 else if (!len2)
4745 len = len1;
4746 else if (TREE_SIDE_EFFECTS (len1))
4747 len = len2;
4748 else if (TREE_SIDE_EFFECTS (len2))
4749 len = len1;
4750 else if (TREE_CODE (len1) != INTEGER_CST)
4751 len = len2;
4752 else if (TREE_CODE (len2) != INTEGER_CST)
4753 len = len1;
4754 else if (tree_int_cst_lt (len1, len2))
4755 len = len1;
4756 else
4757 len = len2;
4759 /* If we are not using the given length, we must incorporate it here.
4760 The actual new length parameter will be MIN(len,arg3) in this case. */
4761 if (len != len3)
4762 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4763 rtx arg1_rtx = get_memory_rtx (arg1, len);
4764 rtx arg2_rtx = get_memory_rtx (arg2, len);
4765 rtx arg3_rtx = expand_normal (len);
4766 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4767 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4768 MIN (arg1_align, arg2_align));
4770 tree fndecl = get_callee_fndecl (exp);
4771 if (result)
4773 /* Check to see if the argument was declared attribute nonstring
4774 and if so, issue a warning since at this point it's not known
4775 to be nul-terminated. */
4776 maybe_warn_nonstring_arg (fndecl, exp);
4778 /* Return the value in the proper mode for this function. */
4779 mode = TYPE_MODE (TREE_TYPE (exp));
4780 if (GET_MODE (result) == mode)
4781 return result;
4782 if (target == 0)
4783 return convert_to_mode (mode, result, 0);
4784 convert_move (target, result, 0);
4785 return target;
4788 /* Expand the library call ourselves using a stabilized argument
4789 list to avoid re-evaluating the function's arguments twice. */
4790 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4791 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4792 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4793 return expand_call (fn, target, target == const0_rtx);
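The length-selection cascade above reduces to: take the shorter known strlen + 1 when available, otherwise whichever one is known, otherwise the user-supplied bound; then clamp by that bound. A condensed sketch with hypothetical names (the side-effect and INTEGER_CST preferences are omitted):

    /* LEN1/LEN2 are strlen+1 of the arguments when known, 0 otherwise;
       LEN3 is the bound argument.  Returns the count handed to the
       cmpstrn/cmpmem pattern.  */
    static unsigned long
    effective_length (unsigned long len1, unsigned long len2,
                      unsigned long len3)
    {
      unsigned long len;
      if (!len1 && !len2)
        return len3;                        /* neither length is known */
      len = !len1 ? len2
            : !len2 ? len1
            : len1 < len2 ? len1 : len2;    /* both known: use the smaller */
      return len < len3 ? len : len3;       /* MIN (len, arg3) */
    }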
4796 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4797 if that's convenient. */
4799 rtx
4800 expand_builtin_saveregs (void)
4802 rtx val;
4803 rtx_insn *seq;
4805 /* Don't do __builtin_saveregs more than once in a function.
4806 Save the result of the first call and reuse it. */
4807 if (saveregs_value != 0)
4808 return saveregs_value;
4810 /* When this function is called, it means that registers must be
4811 saved on entry to this function. So we migrate the call to the
4812 first insn of this function. */
4814 start_sequence ();
4816 /* Do whatever the machine needs done in this case. */
4817 val = targetm.calls.expand_builtin_saveregs ();
4819 seq = get_insns ();
4820 end_sequence ();
4822 saveregs_value = val;
4824 /* Put the insns after the NOTE that starts the function. If this
4825 is inside a start_sequence, make the outer-level insn chain current, so
4826 the code is placed at the start of the function. */
4827 push_topmost_sequence ();
4828 emit_insn_after (seq, entry_of_function ());
4829 pop_topmost_sequence ();
4831 return val;
4834 /* Expand a call to __builtin_next_arg. */
4836 static rtx
4837 expand_builtin_next_arg (void)
4839 /* Checking arguments is already done in fold_builtin_next_arg
4840 that must be called before this function. */
4841 return expand_binop (ptr_mode, add_optab,
4842 crtl->args.internal_arg_pointer,
4843 crtl->args.arg_offset_rtx,
4844 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4847 /* Make it easier for the backends by protecting the valist argument
4848 from multiple evaluations. */
4850 static tree
4851 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4853 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4855 /* The current way of determining the type of valist is completely
4856 bogus. We should have the information on the va builtin instead. */
4857 if (!vatype)
4858 vatype = targetm.fn_abi_va_list (cfun->decl);
4860 if (TREE_CODE (vatype) == ARRAY_TYPE)
4862 if (TREE_SIDE_EFFECTS (valist))
4863 valist = save_expr (valist);
4865 /* For this case, the backends will be expecting a pointer to
4866 vatype, but it's possible we've actually been given an array
4867 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4868 So fix it. */
4869 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4871 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4872 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4875 else
4877 tree pt = build_pointer_type (vatype);
4879 if (! needs_lvalue)
4881 if (! TREE_SIDE_EFFECTS (valist))
4882 return valist;
4884 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4885 TREE_SIDE_EFFECTS (valist) = 1;
4888 if (TREE_SIDE_EFFECTS (valist))
4889 valist = save_expr (valist);
4890 valist = fold_build2_loc (loc, MEM_REF,
4891 vatype, valist, build_int_cst (pt, 0));
4894 return valist;
4897 /* The "standard" definition of va_list is void*. */
4899 tree
4900 std_build_builtin_va_list (void)
4902 return ptr_type_node;
4905 /* The "standard" abi va_list is va_list_type_node. */
4907 tree
4908 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4910 return va_list_type_node;
4913 /* The "standard" type of va_list is va_list_type_node. */
4915 tree
4916 std_canonical_va_list_type (tree type)
4918 tree wtype, htype;
4920 wtype = va_list_type_node;
4921 htype = type;
4923 if (TREE_CODE (wtype) == ARRAY_TYPE)
4925 /* If va_list is an array type, the argument may have decayed
4926 to a pointer type, e.g. by being passed to another function.
4927 In that case, unwrap both types so that we can compare the
4928 underlying records. */
4929 if (TREE_CODE (htype) == ARRAY_TYPE
4930 || POINTER_TYPE_P (htype))
4932 wtype = TREE_TYPE (wtype);
4933 htype = TREE_TYPE (htype);
4936 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4937 return va_list_type_node;
4939 return NULL_TREE;
4942 /* The "standard" implementation of va_start: just assign `nextarg' to
4943 the variable. */
4945 void
4946 std_expand_builtin_va_start (tree valist, rtx nextarg)
4948 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4949 convert_move (va_r, nextarg, 0);
4952 /* Expand EXP, a call to __builtin_va_start. */
4954 static rtx
4955 expand_builtin_va_start (tree exp)
4957 rtx nextarg;
4958 tree valist;
4959 location_t loc = EXPR_LOCATION (exp);
4961 if (call_expr_nargs (exp) < 2)
4963 error_at (loc, "too few arguments to function %<va_start%>");
4964 return const0_rtx;
4967 if (fold_builtin_next_arg (exp, true))
4968 return const0_rtx;
4970 nextarg = expand_builtin_next_arg ();
4971 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4973 if (targetm.expand_builtin_va_start)
4974 targetm.expand_builtin_va_start (valist, nextarg);
4975 else
4976 std_expand_builtin_va_start (valist, nextarg);
4978 return const0_rtx;
4981 /* Expand EXP, a call to __builtin_va_end. */
4983 static rtx
4984 expand_builtin_va_end (tree exp)
4986 tree valist = CALL_EXPR_ARG (exp, 0);
4988 /* Evaluate for side effects, if needed. I hate macros that don't
4989 do that. */
4990 if (TREE_SIDE_EFFECTS (valist))
4991 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4993 return const0_rtx;
4996 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4997 builtin rather than just as an assignment in stdarg.h because of the
4998 nastiness of array-type va_list types. */
5000 static rtx
5001 expand_builtin_va_copy (tree exp)
5003 tree dst, src, t;
5004 location_t loc = EXPR_LOCATION (exp);
5006 dst = CALL_EXPR_ARG (exp, 0);
5007 src = CALL_EXPR_ARG (exp, 1);
5009 dst = stabilize_va_list_loc (loc, dst, 1);
5010 src = stabilize_va_list_loc (loc, src, 0);
5012 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5014 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5016 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5017 TREE_SIDE_EFFECTS (t) = 1;
5018 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5020 else
5022 rtx dstb, srcb, size;
5024 /* Evaluate to pointers. */
5025 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5026 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5027 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5028 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5030 dstb = convert_memory_address (Pmode, dstb);
5031 srcb = convert_memory_address (Pmode, srcb);
5033 /* "Dereference" to BLKmode memories. */
5034 dstb = gen_rtx_MEM (BLKmode, dstb);
5035 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5036 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5037 srcb = gen_rtx_MEM (BLKmode, srcb);
5038 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5039 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5041 /* Copy. */
5042 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5045 return const0_rtx;
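The array-versus-record distinction handled above is invisible in user code: va_copy is written the same way regardless, and this expander picks a plain assignment or a block copy. Typical usage:

    #include <stdarg.h>
    #include <stdio.h>

    int
    log_twice (const char *fmt, va_list ap)
    {
      va_list ap2;
      va_copy (ap2, ap);        /* expanded by expand_builtin_va_copy */
      int n = vprintf (fmt, ap);
      vprintf (fmt, ap2);       /* ap2 is an independent copy */
      va_end (ap2);
      return n;
    }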
5048 /* Expand a call to one of the builtin functions __builtin_frame_address or
5049 __builtin_return_address. */
5051 static rtx
5052 expand_builtin_frame_address (tree fndecl, tree exp)
5054 /* The argument must be a nonnegative integer constant.
5055 It counts the number of frames to scan up the stack.
5056 The value is either the frame pointer value or the return
5057 address saved in that frame. */
5058 if (call_expr_nargs (exp) == 0)
5059 /* Warning about missing arg was already issued. */
5060 return const0_rtx;
5061 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5063 error ("invalid argument to %qD", fndecl);
5064 return const0_rtx;
5066 else
5068 /* Number of frames to scan up the stack. */
5069 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5071 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5073 /* Some ports cannot access arbitrary stack frames. */
5074 if (tem == NULL)
5076 warning (0, "unsupported argument to %qD", fndecl);
5077 return const0_rtx;
5080 if (count)
5082 /* Warn since no effort is made to ensure that any frame
5083 beyond the current one exists or can be safely reached. */
5084 warning (OPT_Wframe_address, "calling %qD with "
5085 "a nonzero argument is unsafe", fndecl);
5088 /* For __builtin_frame_address, return what we've got. */
5089 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5090 return tem;
5092 if (!REG_P (tem)
5093 && ! CONSTANT_P (tem))
5094 tem = copy_addr_to_reg (tem);
5095 return tem;
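At the source level the argument counts frames up the stack; only zero is reliably supported everywhere, which is why a nonzero count draws -Wframe-address above. For instance:

    void *
    caller_pc (void)
    {
      /* Current frame: always supported.  */
      return __builtin_return_address (0);
    }

    void *
    grandcaller_pc (void)
    {
      /* Nonzero count: may be unsafe, hence -Wframe-address.  */
      return __builtin_return_address (2);
    }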
5099 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5100 failed and the caller should emit a normal call. */
5102 static rtx
5103 expand_builtin_alloca (tree exp)
5105 rtx op0;
5106 rtx result;
5107 unsigned int align;
5108 tree fndecl = get_callee_fndecl (exp);
5109 HOST_WIDE_INT max_size;
5110 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5111 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5112 bool valid_arglist
5113 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5114 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5115 VOID_TYPE)
5116 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5117 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5118 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5120 if (!valid_arglist)
5121 return NULL_RTX;
5123 if ((alloca_for_var
5124 && warn_vla_limit >= HOST_WIDE_INT_MAX
5125 && warn_alloc_size_limit < warn_vla_limit)
5126 || (!alloca_for_var
5127 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5128 && warn_alloc_size_limit < warn_alloca_limit
5131 /* -Walloca-larger-than and -Wvla-larger-than settings of
5132 less than HOST_WIDE_INT_MAX override the more general
5133 -Walloc-size-larger-than so unless either of the former
5134 options is smaller than the last one (which would imply
5135 that the call was already checked), check the alloca
5136 arguments for overflow. */
5137 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5138 int idx[] = { 0, -1 };
5139 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5142 /* Compute the argument. */
5143 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5145 /* Compute the alignment. */
5146 align = (fcode == BUILT_IN_ALLOCA
5147 ? BIGGEST_ALIGNMENT
5148 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5150 /* Compute the maximum size. */
5151 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5152 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5153 : -1);
5155 /* Allocate the desired space. If the allocation stems from the declaration
5156 of a variable-sized object, it cannot accumulate. */
5157 result
5158 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5159 result = convert_memory_address (ptr_mode, result);
5161 return result;
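At the source level the extra arguments of the _with_align variants supply a constant alignment (in bits, per the GCC manual) and, for the _and_max form used internally, an upper bound on the allocation. A hedged illustration:

    #include <stddef.h>

    void
    use_alloca (size_t n)
    {
      /* Plain alloca: BIGGEST_ALIGNMENT applies.  */
      char *a = __builtin_alloca (n);

      /* Explicit alignment of 128 bits (16 bytes).  */
      char *b = __builtin_alloca_with_align (n, 128);

      (void) a; (void) b;
    }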
5164 /* Emit a call to __asan_allocas_unpoison in EXP. Add to its second
5165 argument the value virtual_stack_dynamic_rtx - stack_pointer_rtx,
5166 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
5167 in the comment above the handle_builtin_stack_restore function. */
5169 static rtx
5170 expand_asan_emit_allocas_unpoison (tree exp)
5172 tree arg0 = CALL_EXPR_ARG (exp, 0);
5173 tree arg1 = CALL_EXPR_ARG (exp, 1);
5174 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5175 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5176 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5177 stack_pointer_rtx, NULL_RTX, 0,
5178 OPTAB_LIB_WIDEN);
5179 off = convert_modes (ptr_mode, Pmode, off, 0);
5180 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5181 OPTAB_LIB_WIDEN);
5182 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5183 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5184 top, ptr_mode, bot, ptr_mode);
5185 return ret;
5188 /* Expand a call to bswap builtin in EXP.
5189 Return NULL_RTX if a normal call should be emitted rather than expanding the
5190 function in-line. If convenient, the result should be placed in TARGET.
5191 SUBTARGET may be used as the target for computing one of EXP's operands. */
5193 static rtx
5194 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5195 rtx subtarget)
5197 tree arg;
5198 rtx op0;
5200 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5201 return NULL_RTX;
5203 arg = CALL_EXPR_ARG (exp, 0);
5204 op0 = expand_expr (arg,
5205 subtarget && GET_MODE (subtarget) == target_mode
5206 ? subtarget : NULL_RTX,
5207 target_mode, EXPAND_NORMAL);
5208 if (GET_MODE (op0) != target_mode)
5209 op0 = convert_to_mode (target_mode, op0, 1);
5211 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5213 gcc_assert (target);
5215 return convert_to_mode (target_mode, target, 1);
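Usage is straightforward; when the target provides a bswap pattern the call expands to a single instruction:

    #include <stdint.h>

    uint32_t
    swap_endianness (uint32_t host)
    {
      /* Goes through bswap_optab above; a single BSWAP on x86.  */
      return __builtin_bswap32 (host);
    }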
5218 /* Expand a call to a unary builtin in EXP.
5219 Return NULL_RTX if a normal call should be emitted rather than expanding the
5220 function in-line. If convenient, the result should be placed in TARGET.
5221 SUBTARGET may be used as the target for computing one of EXP's operands. */
5223 static rtx
5224 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5225 rtx subtarget, optab op_optab)
5227 rtx op0;
5229 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5230 return NULL_RTX;
5232 /* Compute the argument. */
5233 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5234 (subtarget
5235 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5236 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5237 VOIDmode, EXPAND_NORMAL);
5238 /* Compute op, into TARGET if possible.
5239 Set TARGET to wherever the result comes back. */
5240 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5241 op_optab, op0, target, op_optab != clrsb_optab);
5242 gcc_assert (target);
5244 return convert_to_mode (target_mode, target, 0);
5247 /* Expand a call to __builtin_expect. We just return our argument
5248 as the builtin_expect semantics should already have been handled by
5249 the tree branch prediction pass. */
5251 static rtx
5252 expand_builtin_expect (tree exp, rtx target)
5254 tree arg;
5256 if (call_expr_nargs (exp) < 2)
5257 return const0_rtx;
5258 arg = CALL_EXPR_ARG (exp, 0);
5260 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5261 /* When guessing was done, the hints should already have been stripped away. */
5262 gcc_assert (!flag_guess_branch_prob
5263 || optimize == 0 || seen_error ());
5264 return target;
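/* A typical source-level use (a sketch; handle_rare_error is a
   placeholder).  The hint only steers branch prediction earlier in the
   pipeline; by this point the call simply expands to its first argument:

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();
*/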
5267 /* Expand a call to __builtin_expect_with_probability. We just return our
5268 argument as the builtin_expect semantics should already have been handled
5269 by the tree branch prediction pass. */
5271 static rtx
5272 expand_builtin_expect_with_probability (tree exp, rtx target)
5274 tree arg;
5276 if (call_expr_nargs (exp) < 3)
5277 return const0_rtx;
5278 arg = CALL_EXPR_ARG (exp, 0);
5280 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5281 /* When guessing was done, the hints should already have been stripped away. */
5282 gcc_assert (!flag_guess_branch_prob
5283 || optimize == 0 || seen_error ());
5284 return target;
5288 /* Expand a call to __builtin_assume_aligned. We just return our first
5289 argument, as the builtin_assume_aligned semantics should already have
5290 been handled by CCP. */
5292 static rtx
5293 expand_builtin_assume_aligned (tree exp, rtx target)
5295 if (call_expr_nargs (exp) < 2)
5296 return const0_rtx;
5297 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5298 EXPAND_NORMAL);
5299 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5300 && (call_expr_nargs (exp) < 3
5301 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5302 return target;
5305 void
5306 expand_builtin_trap (void)
5308 if (targetm.have_trap ())
5310 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5311 /* For trap insns, when not accumulating outgoing args, force a
5312 REG_ARGS_SIZE note to prevent crossjumping of calls with
5313 different args sizes. */
5314 if (!ACCUMULATE_OUTGOING_ARGS)
5315 add_args_size_note (insn, stack_pointer_delta);
5317 else
5319 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5320 tree call_expr = build_call_expr (fn, 0);
5321 expand_call (call_expr, NULL_RTX, false);
5324 emit_barrier ();
5327 /* Expand a call to __builtin_unreachable. We do nothing except emit
5328 a barrier saying that control flow will not pass here.
5330 It is the responsibility of the program being compiled to ensure
5331 that control flow never reaches __builtin_unreachable. */
5332 static void
5333 expand_builtin_unreachable (void)
5335 emit_barrier ();
5338 /* Expand EXP, a call to fabs, fabsf or fabsl.
5339 Return NULL_RTX if a normal call should be emitted rather than expanding
5340 the function inline. If convenient, the result should be placed
5341 in TARGET. SUBTARGET may be used as the target for computing
5342 the operand. */
5344 static rtx
5345 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5347 machine_mode mode;
5348 tree arg;
5349 rtx op0;
5351 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5352 return NULL_RTX;
5354 arg = CALL_EXPR_ARG (exp, 0);
5355 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5356 mode = TYPE_MODE (TREE_TYPE (arg));
5357 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5358 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5361 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5362 Return NULL if a normal call should be emitted rather than expanding the
5363 function inline. If convenient, the result should be placed in TARGET.
5364 SUBTARGET may be used as the target for computing the operand. */
5366 static rtx
5367 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5369 rtx op0, op1;
5370 tree arg;
5372 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5373 return NULL_RTX;
5375 arg = CALL_EXPR_ARG (exp, 0);
5376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5378 arg = CALL_EXPR_ARG (exp, 1);
5379 op1 = expand_normal (arg);
5381 return expand_copysign (op0, op1, target);
5384 /* Expand a call to __builtin___clear_cache. */
5386 static rtx
5387 expand_builtin___clear_cache (tree exp)
5389 if (!targetm.code_for_clear_cache)
5391 #ifdef CLEAR_INSN_CACHE
5392 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5393 does something. Just do the default expansion to a call to
5394 __clear_cache(). */
5395 return NULL_RTX;
5396 #else
5397 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5398 does nothing. There is no need to call it. Do nothing. */
5399 return const0_rtx;
5400 #endif /* CLEAR_INSN_CACHE */
5403 /* We have a "clear_cache" insn, and it will handle everything. */
5404 tree begin, end;
5405 rtx begin_rtx, end_rtx;
5407 /* We must not expand to a library call. If we did, any
5408 fallback library function in libgcc that might contain a call to
5409 __builtin___clear_cache() would recurse infinitely. */
5410 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5412 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5413 return const0_rtx;
5416 if (targetm.have_clear_cache ())
5418 struct expand_operand ops[2];
5420 begin = CALL_EXPR_ARG (exp, 0);
5421 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5423 end = CALL_EXPR_ARG (exp, 1);
5424 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5426 create_address_operand (&ops[0], begin_rtx);
5427 create_address_operand (&ops[1], end_rtx);
5428 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5429 return const0_rtx;
5431 return const0_rtx;
5434 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5436 static rtx
5437 round_trampoline_addr (rtx tramp)
5439 rtx temp, addend, mask;
5441 /* If we don't need too much alignment, we'll have been guaranteed
5442 proper alignment by get_trampoline_type. */
5443 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5444 return tramp;
5446 /* Round address up to desired boundary. */
5447 temp = gen_reg_rtx (Pmode);
5448 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5449 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5451 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5452 temp, 0, OPTAB_LIB_WIDEN);
5453 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5454 temp, 0, OPTAB_LIB_WIDEN);
5456 return tramp;
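/* A sketch of the round-up arithmetic above, assuming a
   TRAMPOLINE_ALIGNMENT of 64 bits, i.e. 8 bytes:

     addr = (addr + 7) & -8;    // (addr + align - 1) & -align

   e.g. 0x1003 rounds up to 0x1008, while 0x1008 is left unchanged.  */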
5459 static rtx
5460 expand_builtin_init_trampoline (tree exp, bool onstack)
5462 tree t_tramp, t_func, t_chain;
5463 rtx m_tramp, r_tramp, r_chain, tmp;
5465 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5466 POINTER_TYPE, VOID_TYPE))
5467 return NULL_RTX;
5469 t_tramp = CALL_EXPR_ARG (exp, 0);
5470 t_func = CALL_EXPR_ARG (exp, 1);
5471 t_chain = CALL_EXPR_ARG (exp, 2);
5473 r_tramp = expand_normal (t_tramp);
5474 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5475 MEM_NOTRAP_P (m_tramp) = 1;
5477 /* If ONSTACK, the TRAMP argument should be the address of a field
5478 within the local function's FRAME decl. Either way, let's see if
5479 we can fill in the MEM_ATTRs for this memory. */
5480 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5481 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5483 /* Creator of a heap trampoline is responsible for making sure the
5484 address is aligned to at least STACK_BOUNDARY. Normally malloc
5485 will ensure this anyhow. */
5486 tmp = round_trampoline_addr (r_tramp);
5487 if (tmp != r_tramp)
5489 m_tramp = change_address (m_tramp, BLKmode, tmp);
5490 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5491 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5494 /* The FUNC argument should be the address of the nested function.
5495 Extract the actual function decl to pass to the hook. */
5496 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5497 t_func = TREE_OPERAND (t_func, 0);
5498 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5500 r_chain = expand_normal (t_chain);
5502 /* Generate insns to initialize the trampoline. */
5503 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5505 if (onstack)
5507 trampolines_created = 1;
5509 if (targetm.calls.custom_function_descriptors != 0)
5510 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5511 "trampoline generated for nested function %qD", t_func);
5514 return const0_rtx;
5517 static rtx
5518 expand_builtin_adjust_trampoline (tree exp)
5520 rtx tramp;
5522 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5523 return NULL_RTX;
5525 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5526 tramp = round_trampoline_addr (tramp);
5527 if (targetm.calls.trampoline_adjust_address)
5528 tramp = targetm.calls.trampoline_adjust_address (tramp);
5530 return tramp;
5533 /* Expand a call to the builtin descriptor initialization routine.
5534 A descriptor is made up of a couple of pointers to the static
5535 chain and the code entry in this order. */
5537 static rtx
5538 expand_builtin_init_descriptor (tree exp)
5540 tree t_descr, t_func, t_chain;
5541 rtx m_descr, r_descr, r_func, r_chain;
5543 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5544 VOID_TYPE))
5545 return NULL_RTX;
5547 t_descr = CALL_EXPR_ARG (exp, 0);
5548 t_func = CALL_EXPR_ARG (exp, 1);
5549 t_chain = CALL_EXPR_ARG (exp, 2);
5551 r_descr = expand_normal (t_descr);
5552 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5553 MEM_NOTRAP_P (m_descr) = 1;
5555 r_func = expand_normal (t_func);
5556 r_chain = expand_normal (t_chain);
5558 /* Generate insns to initialize the descriptor. */
5559 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5560 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5561 POINTER_SIZE / BITS_PER_UNIT), r_func);
5563 return const0_rtx;
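/* Informally, the two moves above fill in something like the following
   layout (a sketch only; the real object is raw BLKmode memory):

     struct descriptor
     {
       void *chain;   // static chain value, at offset 0
       void *entry;   // code entry point, at POINTER_SIZE / BITS_PER_UNIT
     };
*/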
5566 /* Expand a call to the builtin descriptor adjustment routine. */
5568 static rtx
5569 expand_builtin_adjust_descriptor (tree exp)
5571 rtx tramp;
5573 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5574 return NULL_RTX;
5576 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5578 /* Unalign the descriptor to allow runtime identification. */
5579 tramp = plus_constant (ptr_mode, tramp,
5580 targetm.calls.custom_function_descriptors);
5582 return force_operand (tramp, NULL_RTX);
5585 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5586 function. The function first checks whether the back end provides
5587 an insn to implement signbit for the respective mode. If not, it
5588 checks whether the floating point format of the value is such that
5589 the sign bit can be extracted. If that is not the case, error out.
5590 EXP is the expression that is a call to the builtin function; if
5591 convenient, the result should be placed in TARGET. */
5592 static rtx
5593 expand_builtin_signbit (tree exp, rtx target)
5595 const struct real_format *fmt;
5596 scalar_float_mode fmode;
5597 scalar_int_mode rmode, imode;
5598 tree arg;
5599 int word, bitpos;
5600 enum insn_code icode;
5601 rtx temp;
5602 location_t loc = EXPR_LOCATION (exp);
5604 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5605 return NULL_RTX;
5607 arg = CALL_EXPR_ARG (exp, 0);
5608 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5609 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5610 fmt = REAL_MODE_FORMAT (fmode);
5612 arg = builtin_save_expr (arg);
5614 /* Expand the argument, yielding an RTX expression. */
5615 temp = expand_normal (arg);
5617 /* Check if the back end provides an insn that handles signbit for the
5618 argument's mode. */
5619 icode = optab_handler (signbit_optab, fmode);
5620 if (icode != CODE_FOR_nothing)
5622 rtx_insn *last = get_last_insn ();
5623 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5624 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5625 return target;
5626 delete_insns_since (last);
5629 /* For floating point formats without a sign bit, implement signbit
5630 as "ARG < 0.0". */
5631 bitpos = fmt->signbit_ro;
5632 if (bitpos < 0)
5634 /* But we can't do this if the format supports signed zero. */
5635 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5637 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5638 build_real (TREE_TYPE (arg), dconst0));
5639 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5642 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5644 imode = int_mode_for_mode (fmode).require ();
5645 temp = gen_lowpart (imode, temp);
5647 else
5649 imode = word_mode;
5650 /* Handle targets with different FP word orders. */
5651 if (FLOAT_WORDS_BIG_ENDIAN)
5652 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5653 else
5654 word = bitpos / BITS_PER_WORD;
5655 temp = operand_subword_force (temp, word, fmode);
5656 bitpos = bitpos % BITS_PER_WORD;
5659 /* Force the intermediate word_mode (or narrower) result into a
5660 register. This avoids attempting to create paradoxical SUBREGs
5661 of floating point modes below. */
5662 temp = force_reg (imode, temp);
5664 /* If the bitpos is within the "result mode" lowpart, the operation
5665 can be implemented with a single bitwise AND. Otherwise, we need
5666 a right shift and an AND. */
5668 if (bitpos < GET_MODE_BITSIZE (rmode))
5670 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5672 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5673 temp = gen_lowpart (rmode, temp);
5674 temp = expand_binop (rmode, and_optab, temp,
5675 immed_wide_int_const (mask, rmode),
5676 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5678 else
5680 /* Perform a logical right shift to place the signbit in the least
5681 significant bit, then truncate the result to the desired mode
5682 and mask just this bit. */
5683 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5684 temp = gen_lowpart (rmode, temp);
5685 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5686 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5689 return temp;
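/* As a concrete sketch, for IEEE single precision (sign bit at position
   31) the expansion computes a result that is nonzero iff the sign bit
   is set, equivalent to:

     int
     signbit_float (float f)
     {
       unsigned int bits;
       __builtin_memcpy (&bits, &f, sizeof bits);  // the gen_lowpart step
       return (bits >> 31) & 1;
     }

   up to the exact nonzero value returned.  */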
5692 /* Expand fork or exec calls. TARGET is the desired target of the
5693 call. EXP is the call. FN is the
5694 identifier of the actual function. IGNORE is nonzero if the
5695 value is to be ignored. */
5697 static rtx
5698 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5700 tree id, decl;
5701 tree call;
5703 /* If we are not profiling, just call the function. */
5704 if (!profile_arc_flag)
5705 return NULL_RTX;
5707 /* Otherwise call the wrapper. This should be equivalent for the rest of
5708 the compiler, so the code does not diverge, and the wrapper may run the
5709 code necessary for keeping the profiling sane. */
5711 switch (DECL_FUNCTION_CODE (fn))
5713 case BUILT_IN_FORK:
5714 id = get_identifier ("__gcov_fork");
5715 break;
5717 case BUILT_IN_EXECL:
5718 id = get_identifier ("__gcov_execl");
5719 break;
5721 case BUILT_IN_EXECV:
5722 id = get_identifier ("__gcov_execv");
5723 break;
5725 case BUILT_IN_EXECLP:
5726 id = get_identifier ("__gcov_execlp");
5727 break;
5729 case BUILT_IN_EXECLE:
5730 id = get_identifier ("__gcov_execle");
5731 break;
5733 case BUILT_IN_EXECVP:
5734 id = get_identifier ("__gcov_execvp");
5735 break;
5737 case BUILT_IN_EXECVE:
5738 id = get_identifier ("__gcov_execve");
5739 break;
5741 default:
5742 gcc_unreachable ();
5745 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5746 FUNCTION_DECL, id, TREE_TYPE (fn));
5747 DECL_EXTERNAL (decl) = 1;
5748 TREE_PUBLIC (decl) = 1;
5749 DECL_ARTIFICIAL (decl) = 1;
5750 TREE_NOTHROW (decl) = 1;
5751 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5752 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5753 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5754 return expand_call (call, target, ignore);
5759 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5760 the pointer in these functions is void*, the tree optimizers may remove
5761 casts. The mode computed in expand_builtin isn't reliable either, due
5762 to __sync_bool_compare_and_swap.
5764 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5765 group of builtins. This gives us log2 of the mode size. */
5767 static inline machine_mode
5768 get_builtin_sync_mode (int fcode_diff)
5770 /* The size is not negotiable, so ask not to get BLKmode in return
5771 if the target indicates that a smaller size would be better. */
5772 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
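/* Worked example of the mapping, assuming BITS_PER_UNIT == 8:

     get_builtin_sync_mode (0)   // 8 << 0 = 8 bits    -> QImode
     get_builtin_sync_mode (2)   // 8 << 2 = 32 bits   -> SImode
     get_builtin_sync_mode (4)   // 8 << 4 = 128 bits  -> TImode
*/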
5775 /* Expand the memory expression LOC and return the appropriate memory operand
5776 for the builtin_sync operations. */
5778 static rtx
5779 get_builtin_sync_mem (tree loc, machine_mode mode)
5781 rtx addr, mem;
5783 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5784 addr = convert_memory_address (Pmode, addr);
5786 /* Note that we explicitly do not want any alias information for this
5787 memory, so that we kill all other live memories. Otherwise we don't
5788 satisfy the full barrier semantics of the intrinsic. */
5789 mem = validize_mem (gen_rtx_MEM (mode, addr));
5791 /* The alignment needs to be at least that of the mode. */
5792 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5793 get_pointer_alignment (loc)));
5794 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5795 MEM_VOLATILE_P (mem) = 1;
5797 return mem;
5800 /* Make sure an argument is in the right mode.
5801 EXP is the tree argument.
5802 MODE is the mode it should be in. */
5804 static rtx
5805 expand_expr_force_mode (tree exp, machine_mode mode)
5807 rtx val;
5808 machine_mode old_mode;
5810 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5811 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5812 of CONST_INTs, where we know the old_mode only from the call argument. */
5814 old_mode = GET_MODE (val);
5815 if (old_mode == VOIDmode)
5816 old_mode = TYPE_MODE (TREE_TYPE (exp));
5817 val = convert_modes (mode, old_mode, val, 1);
5818 return val;
5822 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5823 EXP is the CALL_EXPR. CODE is the rtx code
5824 that corresponds to the arithmetic or logical operation from the name;
5825 an exception here is that NOT actually means NAND. TARGET is an optional
5826 place for us to store the results; AFTER is true if this is the
5827 fetch_and_xxx form. */
5829 static rtx
5830 expand_builtin_sync_operation (machine_mode mode, tree exp,
5831 enum rtx_code code, bool after,
5832 rtx target)
5834 rtx val, mem;
5835 location_t loc = EXPR_LOCATION (exp);
5837 if (code == NOT && warn_sync_nand)
5839 tree fndecl = get_callee_fndecl (exp);
5840 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5842 static bool warned_f_a_n, warned_n_a_f;
5844 switch (fcode)
5846 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5847 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5848 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5849 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5850 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5851 if (warned_f_a_n)
5852 break;
5854 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5855 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5856 warned_f_a_n = true;
5857 break;
5859 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5860 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5861 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5862 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5863 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5864 if (warned_n_a_f)
5865 break;
5867 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5868 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5869 warned_n_a_f = true;
5870 break;
5872 default:
5873 gcc_unreachable ();
5877 /* Expand the operands. */
5878 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5879 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5881 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5882 after);
5885 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5886 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5887 true if this is the boolean form. TARGET is a place for us to store the
5888 results; this is NOT optional if IS_BOOL is true. */
5890 static rtx
5891 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5892 bool is_bool, rtx target)
5894 rtx old_val, new_val, mem;
5895 rtx *pbool, *poval;
5897 /* Expand the operands. */
5898 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5899 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5900 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5902 pbool = poval = NULL;
5903 if (target != const0_rtx)
5905 if (is_bool)
5906 pbool = &target;
5907 else
5908 poval = &target;
5910 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5911 false, MEMMODEL_SYNC_SEQ_CST,
5912 MEMMODEL_SYNC_SEQ_CST))
5913 return NULL_RTX;
5915 return target;
5918 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5919 general form is actually an atomic exchange, and some targets only
5920 support a reduced form with the second argument being a constant 1.
5921 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5922 the results. */
5924 static rtx
5925 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5926 rtx target)
5928 rtx val, mem;
5930 /* Expand the operands. */
5931 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5932 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5934 return expand_sync_lock_test_and_set (target, mem, val);
5937 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5939 static void
5940 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5942 rtx mem;
5944 /* Expand the operands. */
5945 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5947 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5950 /* Given an integer representing an ``enum memmodel'', verify its
5951 correctness and return the memory model enum. */
5953 static enum memmodel
5954 get_memmodel (tree exp)
5956 rtx op;
5957 unsigned HOST_WIDE_INT val;
5958 source_location loc
5959 = expansion_point_location_if_in_system_header (input_location);
5961 /* If the parameter is not a constant, it's a run time value so we'll just
5962 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5963 if (TREE_CODE (exp) != INTEGER_CST)
5964 return MEMMODEL_SEQ_CST;
5966 op = expand_normal (exp);
5968 val = INTVAL (op);
5969 if (targetm.memmodel_check)
5970 val = targetm.memmodel_check (val);
5971 else if (val & ~MEMMODEL_MASK)
5973 warning_at (loc, OPT_Winvalid_memory_model,
5974 "unknown architecture specifier in memory model to builtin");
5975 return MEMMODEL_SEQ_CST;
5978 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5979 if (memmodel_base (val) >= MEMMODEL_LAST)
5981 warning_at (loc, OPT_Winvalid_memory_model,
5982 "invalid memory model argument to builtin");
5983 return MEMMODEL_SEQ_CST;
5986 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5987 be conservative and promote consume to acquire. */
5988 if (val == MEMMODEL_CONSUME)
5989 val = MEMMODEL_ACQUIRE;
5991 return (enum memmodel) val;
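/* Example behavior of the checks above (a sketch; p and runtime_model
   are placeholders for user code):

     __atomic_load_n (p, __ATOMIC_CONSUME);  // promoted to ACQUIRE here
     __atomic_load_n (p, runtime_model);     // non-constant: SEQ_CST
*/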
5994 /* Expand the __atomic_exchange intrinsic:
5995 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5996 EXP is the CALL_EXPR.
5997 TARGET is an optional place for us to store the results. */
5999 static rtx
6000 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6002 rtx val, mem;
6003 enum memmodel model;
6005 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6007 if (!flag_inline_atomics)
6008 return NULL_RTX;
6010 /* Expand the operands. */
6011 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6012 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6014 return expand_atomic_exchange (target, mem, val, model);
6017 /* Expand the __atomic_compare_exchange intrinsic:
6018 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6019 TYPE desired, BOOL weak,
6020 enum memmodel success,
6021 enum memmodel failure)
6022 EXP is the CALL_EXPR.
6023 TARGET is an optional place for us to store the results. */
6025 static rtx
6026 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6027 rtx target)
6029 rtx expect, desired, mem, oldval;
6030 rtx_code_label *label;
6031 enum memmodel success, failure;
6032 tree weak;
6033 bool is_weak;
6034 source_location loc
6035 = expansion_point_location_if_in_system_header (input_location);
6037 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6038 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6040 if (failure > success)
6042 warning_at (loc, OPT_Winvalid_memory_model,
6043 "failure memory model cannot be stronger than success "
6044 "memory model for %<__atomic_compare_exchange%>");
6045 success = MEMMODEL_SEQ_CST;
6048 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6050 warning_at (loc, OPT_Winvalid_memory_model,
6051 "invalid failure memory model for "
6052 "%<__atomic_compare_exchange%>");
6053 failure = MEMMODEL_SEQ_CST;
6054 success = MEMMODEL_SEQ_CST;
6058 if (!flag_inline_atomics)
6059 return NULL_RTX;
6061 /* Expand the operands. */
6062 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6064 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6065 expect = convert_memory_address (Pmode, expect);
6066 expect = gen_rtx_MEM (mode, expect);
6067 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6069 weak = CALL_EXPR_ARG (exp, 3);
6070 is_weak = false;
6071 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6072 is_weak = true;
6074 if (target == const0_rtx)
6075 target = NULL;
6077 /* Lest the rtl backend create a race condition with an improper store
6078 to memory, always create a new pseudo for OLDVAL. */
6079 oldval = NULL;
6081 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6082 is_weak, success, failure))
6083 return NULL_RTX;
6085 /* Conditionally store back to EXPECT, lest we create a race condition
6086 with an improper store to memory. */
6087 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6088 the normal case where EXPECT is totally private, i.e. a register. At
6089 which point the store can be unconditional. */
6090 label = gen_label_rtx ();
6091 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6092 GET_MODE (target), 1, label);
6093 emit_move_insn (expect, oldval);
6094 emit_label (label);
6096 return target;
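/* A typical source-level pattern this expands, a weak CAS increment loop
   (a sketch; counter and old are placeholders):

     int old = __atomic_load_n (&counter, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&counter, &old, old + 1,
                                          true, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;  // on failure, old has been reloaded from memory
*/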
6099 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6100 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6101 call. The weak parameter must be dropped to match the expected parameter
6102 list and the expected argument changed from value to pointer to memory
6103 slot. */
6105 static void
6106 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6108 unsigned int z;
6109 vec<tree, va_gc> *vec;
6111 vec_alloc (vec, 5);
6112 vec->quick_push (gimple_call_arg (call, 0));
6113 tree expected = gimple_call_arg (call, 1);
6114 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6115 TREE_TYPE (expected));
6116 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6117 if (expd != x)
6118 emit_move_insn (x, expd);
6119 tree v = make_tree (TREE_TYPE (expected), x);
6120 vec->quick_push (build1 (ADDR_EXPR,
6121 build_pointer_type (TREE_TYPE (expected)), v));
6122 vec->quick_push (gimple_call_arg (call, 2));
6123 /* Skip the boolean weak parameter. */
6124 for (z = 4; z < 6; z++)
6125 vec->quick_push (gimple_call_arg (call, z));
6126 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6127 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6128 gcc_assert (bytes_log2 < 5);
6129 built_in_function fncode
6130 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6131 + bytes_log2);
6132 tree fndecl = builtin_decl_explicit (fncode);
6133 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6134 fndecl);
6135 tree exp = build_call_vec (boolean_type_node, fn, vec);
6136 tree lhs = gimple_call_lhs (call);
6137 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6138 if (lhs)
6140 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6141 if (GET_MODE (boolret) != mode)
6142 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6143 x = force_reg (mode, x);
6144 write_complex_part (target, boolret, true);
6145 write_complex_part (target, x, false);
6149 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6151 void
6152 expand_ifn_atomic_compare_exchange (gcall *call)
6154 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6155 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6156 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6157 rtx expect, desired, mem, oldval, boolret;
6158 enum memmodel success, failure;
6159 tree lhs;
6160 bool is_weak;
6161 source_location loc
6162 = expansion_point_location_if_in_system_header (gimple_location (call));
6164 success = get_memmodel (gimple_call_arg (call, 4));
6165 failure = get_memmodel (gimple_call_arg (call, 5));
6167 if (failure > success)
6169 warning_at (loc, OPT_Winvalid_memory_model,
6170 "failure memory model cannot be stronger than success "
6171 "memory model for %<__atomic_compare_exchange%>");
6172 success = MEMMODEL_SEQ_CST;
6175 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6177 warning_at (loc, OPT_Winvalid_memory_model,
6178 "invalid failure memory model for "
6179 "%<__atomic_compare_exchange%>");
6180 failure = MEMMODEL_SEQ_CST;
6181 success = MEMMODEL_SEQ_CST;
6184 if (!flag_inline_atomics)
6186 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6187 return;
6190 /* Expand the operands. */
6191 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6193 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6194 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6196 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6198 boolret = NULL;
6199 oldval = NULL;
6201 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6202 is_weak, success, failure))
6204 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6205 return;
6208 lhs = gimple_call_lhs (call);
6209 if (lhs)
6211 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6212 if (GET_MODE (boolret) != mode)
6213 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6214 write_complex_part (target, boolret, true);
6215 write_complex_part (target, oldval, false);
6219 /* Expand the __atomic_load intrinsic:
6220 TYPE __atomic_load (TYPE *object, enum memmodel)
6221 EXP is the CALL_EXPR.
6222 TARGET is an optional place for us to store the results. */
6224 static rtx
6225 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6227 rtx mem;
6228 enum memmodel model;
6230 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6231 if (is_mm_release (model) || is_mm_acq_rel (model))
6233 source_location loc
6234 = expansion_point_location_if_in_system_header (input_location);
6235 warning_at (loc, OPT_Winvalid_memory_model,
6236 "invalid memory model for %<__atomic_load%>");
6237 model = MEMMODEL_SEQ_CST;
6240 if (!flag_inline_atomics)
6241 return NULL_RTX;
6243 /* Expand the operand. */
6244 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6246 return expand_atomic_load (target, mem, model);
6250 /* Expand the __atomic_store intrinsic:
6251 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6252 EXP is the CALL_EXPR.
6253 TARGET is an optional place for us to store the results. */
6255 static rtx
6256 expand_builtin_atomic_store (machine_mode mode, tree exp)
6258 rtx mem, val;
6259 enum memmodel model;
6261 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6262 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6263 || is_mm_release (model)))
6265 source_location loc
6266 = expansion_point_location_if_in_system_header (input_location);
6267 warning_at (loc, OPT_Winvalid_memory_model,
6268 "invalid memory model for %<__atomic_store%>");
6269 model = MEMMODEL_SEQ_CST;
6272 if (!flag_inline_atomics)
6273 return NULL_RTX;
6275 /* Expand the operands. */
6276 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6277 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6279 return expand_atomic_store (mem, val, model, false);
6282 /* Expand the __atomic_fetch_XXX intrinsic:
6283 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6284 EXP is the CALL_EXPR.
6285 TARGET is an optional place for us to store the results.
6286 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6287 FETCH_AFTER is true if returning the result of the operation.
6288 FETCH_AFTER is false if returning the value before the operation.
6289 IGNORE is true if the result is not used.
6290 EXT_CALL is the correct builtin for an external call if this cannot be
6291 resolved to an instruction sequence. */
6293 static rtx
6294 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6295 enum rtx_code code, bool fetch_after,
6296 bool ignore, enum built_in_function ext_call)
6298 rtx val, mem, ret;
6299 enum memmodel model;
6300 tree fndecl;
6301 tree addr;
6303 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6305 /* Expand the operands. */
6306 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6307 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6309 /* Only try generating instructions if inlining is turned on. */
6310 if (flag_inline_atomics)
6312 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6313 if (ret)
6314 return ret;
6317 /* Return if a different routine isn't needed for the library call. */
6318 if (ext_call == BUILT_IN_NONE)
6319 return NULL_RTX;
6321 /* Change the call to the specified function. */
6322 fndecl = get_callee_fndecl (exp);
6323 addr = CALL_EXPR_FN (exp);
6324 STRIP_NOPS (addr);
6326 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6327 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6329 /* If we will emit code after the call, the call cannot be a tail call.
6330 If it is emitted as a tail call, a barrier is emitted after it, and
6331 then all trailing code is removed. */
6332 if (!ignore)
6333 CALL_EXPR_TAILCALL (exp) = 0;
6335 /* Expand the call here so we can emit trailing code. */
6336 ret = expand_call (exp, target, ignore);
6338 /* Replace the original function just in case it matters. */
6339 TREE_OPERAND (addr, 0) = fndecl;
6341 /* Then issue the arithmetic correction to return the right result. */
6342 if (!ignore)
6344 if (code == NOT)
6346 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6347 OPTAB_LIB_WIDEN);
6348 ret = expand_simple_unop (mode, NOT, ret, target, true);
6350 else
6351 ret = expand_simple_binop (mode, code, ret, val, target, true,
6352 OPTAB_LIB_WIDEN);
6354 return ret;
6357 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6359 void
6360 expand_ifn_atomic_bit_test_and (gcall *call)
6362 tree ptr = gimple_call_arg (call, 0);
6363 tree bit = gimple_call_arg (call, 1);
6364 tree flag = gimple_call_arg (call, 2);
6365 tree lhs = gimple_call_lhs (call);
6366 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6367 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6368 enum rtx_code code;
6369 optab optab;
6370 struct expand_operand ops[5];
6372 gcc_assert (flag_inline_atomics);
6374 if (gimple_call_num_args (call) == 4)
6375 model = get_memmodel (gimple_call_arg (call, 3));
6377 rtx mem = get_builtin_sync_mem (ptr, mode);
6378 rtx val = expand_expr_force_mode (bit, mode);
6380 switch (gimple_call_internal_fn (call))
6382 case IFN_ATOMIC_BIT_TEST_AND_SET:
6383 code = IOR;
6384 optab = atomic_bit_test_and_set_optab;
6385 break;
6386 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6387 code = XOR;
6388 optab = atomic_bit_test_and_complement_optab;
6389 break;
6390 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6391 code = AND;
6392 optab = atomic_bit_test_and_reset_optab;
6393 break;
6394 default:
6395 gcc_unreachable ();
6398 if (lhs == NULL_TREE)
6400 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6401 val, NULL_RTX, true, OPTAB_DIRECT);
6402 if (code == AND)
6403 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6404 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6405 return;
6408 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6409 enum insn_code icode = direct_optab_handler (optab, mode);
6410 gcc_assert (icode != CODE_FOR_nothing);
6411 create_output_operand (&ops[0], target, mode);
6412 create_fixed_operand (&ops[1], mem);
6413 create_convert_operand_to (&ops[2], val, mode, true);
6414 create_integer_operand (&ops[3], model);
6415 create_integer_operand (&ops[4], integer_onep (flag));
6416 if (maybe_expand_insn (icode, 5, ops))
6417 return;
6419 rtx bitval = val;
6420 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6421 val, NULL_RTX, true, OPTAB_DIRECT);
6422 rtx maskval = val;
6423 if (code == AND)
6424 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6425 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6426 code, model, false);
6427 if (integer_onep (flag))
6429 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6430 NULL_RTX, true, OPTAB_DIRECT);
6431 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6432 true, OPTAB_DIRECT);
6434 else
6435 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6436 OPTAB_DIRECT);
6437 if (result != target)
6438 emit_move_insn (target, result);
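/* These internal functions are matched from source patterns along the
   lines of (a sketch; flags and bit are placeholders):

     bool was_set = (__atomic_fetch_or (&flags, 1u << bit,
                                        __ATOMIC_SEQ_CST) >> bit) & 1;

   which targets with atomic_bit_test_and_set can implement directly,
   e.g. as a lock bts instruction on x86.  */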
6441 /* Expand an atomic clear operation.
6442 void _atomic_clear (BOOL *obj, enum memmodel)
6443 EXP is the call expression. */
6445 static rtx
6446 expand_builtin_atomic_clear (tree exp)
6448 machine_mode mode;
6449 rtx mem, ret;
6450 enum memmodel model;
6452 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6453 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6454 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6456 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6458 source_location loc
6459 = expansion_point_location_if_in_system_header (input_location);
6460 warning_at (loc, OPT_Winvalid_memory_model,
6461 "invalid memory model for %<__atomic_store%>");
6462 model = MEMMODEL_SEQ_CST;
6465 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6466 Failing that, a store is issued by __atomic_store. The only way this can
6467 fail is if the bool type is larger than a word size. Unlikely, but
6468 handle it anyway for completeness. Assume a single threaded model since
6469 there is no atomic support in this case, and no barriers are required. */
6470 ret = expand_atomic_store (mem, const0_rtx, model, true);
6471 if (!ret)
6472 emit_move_insn (mem, const0_rtx);
6473 return const0_rtx;
6476 /* Expand an atomic test_and_set operation.
6477 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6478 EXP is the call expression. */
6480 static rtx
6481 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6483 rtx mem;
6484 enum memmodel model;
6485 machine_mode mode;
6487 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6488 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6489 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6491 return expand_atomic_test_and_set (target, mem, model);
6495 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6496 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6498 static tree
6499 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6501 int size;
6502 machine_mode mode;
6503 unsigned int mode_align, type_align;
6505 if (TREE_CODE (arg0) != INTEGER_CST)
6506 return NULL_TREE;
6508 /* We need a corresponding integer mode for the access to be lock-free. */
6509 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6510 if (!int_mode_for_size (size, 0).exists (&mode))
6511 return boolean_false_node;
6513 mode_align = GET_MODE_ALIGNMENT (mode);
6515 if (TREE_CODE (arg1) == INTEGER_CST)
6517 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6519 /* Either this argument is null, or it's a fake pointer encoding
6520 the alignment of the object. */
6521 val = least_bit_hwi (val);
6522 val *= BITS_PER_UNIT;
6524 if (val == 0 || mode_align < val)
6525 type_align = mode_align;
6526 else
6527 type_align = val;
6529 else
6531 tree ttype = TREE_TYPE (arg1);
6533 /* This function is usually invoked and folded immediately by the front
6534 end before anything else has a chance to look at it. The pointer
6535 parameter at this point is usually cast to a void *, so check for that
6536 and look past the cast. */
6537 if (CONVERT_EXPR_P (arg1)
6538 && POINTER_TYPE_P (ttype)
6539 && VOID_TYPE_P (TREE_TYPE (ttype))
6540 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6541 arg1 = TREE_OPERAND (arg1, 0);
6543 ttype = TREE_TYPE (arg1);
6544 gcc_assert (POINTER_TYPE_P (ttype));
6546 /* Get the underlying type of the object. */
6547 ttype = TREE_TYPE (ttype);
6548 type_align = TYPE_ALIGN (ttype);
6551 /* If the object has smaller alignment, the lock free routines cannot
6552 be used. */
6553 if (type_align < mode_align)
6554 return boolean_false_node;
6556 /* Check if a compare_and_swap pattern exists for the mode which represents
6557 the required size. The pattern is not allowed to fail, so the existence
6558 of the pattern indicates support is present. Also require that an
6559 atomic load exists for the required size. */
6560 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6561 return boolean_true_node;
6562 else
6563 return boolean_false_node;
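/* Example of the folding above, assuming a typical 64-bit target with
   4-byte compare-and-swap and atomic load support (p is a placeholder):

     __atomic_always_lock_free (4, 0);           // folds to true
     __atomic_always_lock_free (4, (char *) p);  // char alignment: false
*/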
6566 /* Return true if the parameters to call EXP represent an object which will
6567 always generate lock free instructions. The first argument represents the
6568 size of the object, and the second parameter is a pointer to the object
6569 itself. If NULL is passed for the object, then the result is based on
6570 typical alignment for an object of the specified size. Otherwise return
6571 false. */
6573 static rtx
6574 expand_builtin_atomic_always_lock_free (tree exp)
6576 tree size;
6577 tree arg0 = CALL_EXPR_ARG (exp, 0);
6578 tree arg1 = CALL_EXPR_ARG (exp, 1);
6580 if (TREE_CODE (arg0) != INTEGER_CST)
6582 error ("non-constant argument 1 to __atomic_always_lock_free");
6583 return const0_rtx;
6586 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6587 if (size == boolean_true_node)
6588 return const1_rtx;
6589 return const0_rtx;
6592 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6593 is lock free on this architecture. */
6595 static tree
6596 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6598 if (!flag_inline_atomics)
6599 return NULL_TREE;
6601 /* If it isn't always lock free, don't generate a result. */
6602 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6603 return boolean_true_node;
6605 return NULL_TREE;
6608 /* Return true if the parameters to call EXP represent an object which will
6609 always generate lock free instructions. The first argument represents the
6610 size of the object, and the second parameter is a pointer to the object
6611 itself. If NULL is passed for the object, then the result is based on
6612 typical alignment for an object of the specified size. Otherwise return
6613 NULL. */
6615 static rtx
6616 expand_builtin_atomic_is_lock_free (tree exp)
6618 tree size;
6619 tree arg0 = CALL_EXPR_ARG (exp, 0);
6620 tree arg1 = CALL_EXPR_ARG (exp, 1);
6622 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6624 error ("non-integer argument 1 to __atomic_is_lock_free");
6625 return NULL_RTX;
6628 if (!flag_inline_atomics)
6629 return NULL_RTX;
6631 /* If the value is known at compile time, return the RTX for it. */
6632 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6633 if (size == boolean_true_node)
6634 return const1_rtx;
6636 return NULL_RTX;
6639 /* Expand the __atomic_thread_fence intrinsic:
6640 void __atomic_thread_fence (enum memmodel)
6641 EXP is the CALL_EXPR. */
6643 static void
6644 expand_builtin_atomic_thread_fence (tree exp)
6646 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6647 expand_mem_thread_fence (model);
6650 /* Expand the __atomic_signal_fence intrinsic:
6651 void __atomic_signal_fence (enum memmodel)
6652 EXP is the CALL_EXPR. */
6654 static void
6655 expand_builtin_atomic_signal_fence (tree exp)
6657 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6658 expand_mem_signal_fence (model);
6661 /* Expand the __sync_synchronize intrinsic. */
6663 static void
6664 expand_builtin_sync_synchronize (void)
6666 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6669 static rtx
6670 expand_builtin_thread_pointer (tree exp, rtx target)
6672 enum insn_code icode;
6673 if (!validate_arglist (exp, VOID_TYPE))
6674 return const0_rtx;
6675 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6676 if (icode != CODE_FOR_nothing)
6678 struct expand_operand op;
6679 /* If the target is not suitable then create a new target. */
6680 if (target == NULL_RTX
6681 || !REG_P (target)
6682 || GET_MODE (target) != Pmode)
6683 target = gen_reg_rtx (Pmode);
6684 create_output_operand (&op, target, Pmode);
6685 expand_insn (icode, 1, &op);
6686 return target;
6688 error ("__builtin_thread_pointer is not supported on this target");
6689 return const0_rtx;
6692 static void
6693 expand_builtin_set_thread_pointer (tree exp)
6695 enum insn_code icode;
6696 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6697 return;
6698 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6699 if (icode != CODE_FOR_nothing)
6701 struct expand_operand op;
6702 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6703 Pmode, EXPAND_NORMAL);
6704 create_input_operand (&op, val, Pmode);
6705 expand_insn (icode, 1, &op);
6706 return;
6708 error ("__builtin_set_thread_pointer is not supported on this target");
6712 /* Emit code to restore the current value of the stack. */
6714 static void
6715 expand_stack_restore (tree var)
6717 rtx_insn *prev;
6718 rtx sa = expand_normal (var);
6720 sa = convert_memory_address (Pmode, sa);
6722 prev = get_last_insn ();
6723 emit_stack_restore (SAVE_BLOCK, sa);
6725 record_new_stack_level ();
6727 fixup_args_size_notes (prev, get_last_insn (), 0);
6730 /* Emit code to save the current value of the stack. */
6732 static rtx
6733 expand_stack_save (void)
6735 rtx ret = NULL_RTX;
6737 emit_stack_save (SAVE_BLOCK, &ret);
6738 return ret;
6741 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6743 static rtx
6744 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6746 const char *name;
6747 rtx fallback_retval;
6748 rtx_insn *(*gen_fn) (rtx, rtx);
6749 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6751 case BUILT_IN_GOACC_PARLEVEL_ID:
6752 name = "__builtin_goacc_parlevel_id";
6753 fallback_retval = const0_rtx;
6754 gen_fn = targetm.gen_oacc_dim_pos;
6755 break;
6756 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6757 name = "__builtin_goacc_parlevel_size";
6758 fallback_retval = const1_rtx;
6759 gen_fn = targetm.gen_oacc_dim_size;
6760 break;
6761 default:
6762 gcc_unreachable ();
6765 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6767 error ("%qs only supported in OpenACC code", name);
6768 return const0_rtx;
6771 tree arg = CALL_EXPR_ARG (exp, 0);
6772 if (TREE_CODE (arg) != INTEGER_CST)
6774 error ("non-constant argument 0 to %qs", name);
6775 return const0_rtx;
6778 int dim = TREE_INT_CST_LOW (arg);
6779 switch (dim)
6781 case GOMP_DIM_GANG:
6782 case GOMP_DIM_WORKER:
6783 case GOMP_DIM_VECTOR:
6784 break;
6785 default:
6786 error ("illegal argument 0 to %qs", name);
6787 return const0_rtx;
6790 if (ignore)
6791 return target;
6793 if (target == NULL_RTX)
6794 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6796 if (!targetm.have_oacc_dim_size ())
6798 emit_move_insn (target, fallback_retval);
6799 return target;
6802 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6803 emit_insn (gen_fn (reg, GEN_INT (dim)));
6804 if (reg != target)
6805 emit_move_insn (target, reg);
6807 return target;
6810 /* Expand a string compare operation using a sequence of char comparisons
6811 to get rid of the calling overhead, with result going to TARGET if
6812 that's convenient.
6814 VAR_STR is the variable string source;
6815 CONST_STR is the constant string source;
6816 LENGTH is the number of chars to compare;
6817 CONST_STR_N indicates which source string is the constant string;
6818 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6820 It expands to (assuming const_str_n is 2, i.e., arg2 is a constant string):
6822 target = (int) (unsigned char) var_str[0]
6823 - (int) (unsigned char) const_str[0];
6824 if (target != 0)
6825 goto ne_label;
6827 target = (int) (unsigned char) var_str[length - 2]
6828 - (int) (unsigned char) const_str[length - 2];
6829 if (target != 0)
6830 goto ne_label;
6831 target = (int) (unsigned char) var_str[length - 1]
6832 - (int) (unsigned char) const_str[length - 1];
6833 ne_label:
6836 static rtx
6837 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6838 unsigned HOST_WIDE_INT length,
6839 int const_str_n, machine_mode mode)
6841 HOST_WIDE_INT offset = 0;
6842 rtx var_rtx_array
6843 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6844 rtx var_rtx = NULL_RTX;
6845 rtx const_rtx = NULL_RTX;
6846 rtx result = target ? target : gen_reg_rtx (mode);
6847 rtx_code_label *ne_label = gen_label_rtx ();
6848 tree unit_type_node = unsigned_char_type_node;
6849 scalar_int_mode unit_mode
6850 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6852 start_sequence ();
6854 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6856 var_rtx
6857 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6858 const_rtx = c_readstr (const_str + offset, unit_mode);
6859 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6860 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6862 op0 = convert_modes (mode, unit_mode, op0, 1);
6863 op1 = convert_modes (mode, unit_mode, op1, 1);
6864 result = expand_simple_binop (mode, MINUS, op0, op1,
6865 result, 1, OPTAB_WIDEN);
6866 if (i < length - 1)
6867 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6868 mode, true, ne_label);
6869 offset += GET_MODE_SIZE (unit_mode);
6872 emit_label (ne_label);
6873 rtx_insn *insns = get_insns ();
6874 end_sequence ();
6875 emit_insn (insns);
6877 return result;
6880 /* Inline expansion of a call to str(n)cmp, with the result going to
6881 TARGET if that's convenient.
6882 If the call is not inlined, return NULL_RTX. */
6883 static rtx
6884 inline_expand_builtin_string_cmp (tree exp, rtx target)
6886 tree fndecl = get_callee_fndecl (exp);
6887 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6888 unsigned HOST_WIDE_INT length = 0;
6889 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6891 /* Do NOT apply this inlining expansion when optimizing for size or
6892 optimization level below 2. */
6893 if (optimize < 2 || optimize_insn_for_size_p ())
6894 return NULL_RTX;
6896 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6897 || fcode == BUILT_IN_STRNCMP
6898 || fcode == BUILT_IN_MEMCMP);
6900 /* On a target where the type of the call (int) has the same or narrower precision
6901 than unsigned char, give up the inlining expansion. */
6902 if (TYPE_PRECISION (unsigned_char_type_node)
6903 >= TYPE_PRECISION (TREE_TYPE (exp)))
6904 return NULL_RTX;
6906 tree arg1 = CALL_EXPR_ARG (exp, 0);
6907 tree arg2 = CALL_EXPR_ARG (exp, 1);
6908 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6910 unsigned HOST_WIDE_INT len1 = 0;
6911 unsigned HOST_WIDE_INT len2 = 0;
6912 unsigned HOST_WIDE_INT len3 = 0;
6914 const char *src_str1 = c_getstr (arg1, &len1);
6915 const char *src_str2 = c_getstr (arg2, &len2);
6917 /* If neither string is a constant string, the call does not qualify. */
6918 if (!src_str1 && !src_str2)
6919 return NULL_RTX;
6921 /* For strncmp, if the length is not a constant, it does not qualify. */
6922 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6923 return NULL_RTX;
6925 int const_str_n = 0;
6926 if (!len1)
6927 const_str_n = 2;
6928 else if (!len2)
6929 const_str_n = 1;
6930 else if (len2 > len1)
6931 const_str_n = 1;
6932 else
6933 const_str_n = 2;
6935 gcc_checking_assert (const_str_n > 0);
6936 length = (const_str_n == 1) ? len1 : len2;
6938 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6939 length = len3;
6941 /* If the length of the comparison is larger than the threshold,
6942 do nothing. */
6943 if (length > (unsigned HOST_WIDE_INT)
6944 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6945 return NULL_RTX;
6947 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6949 /* Now, start inline expansion of the call. */
6950 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6951 (const_str_n == 1) ? src_str1 : src_str2, length,
6952 const_str_n, mode);
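/* For instance, at -O2 with the default BUILTIN_STRING_CMP_INLINE_LENGTH,
   a call such as (a sketch; buf is a placeholder):

     if (memcmp (buf, "ab", 2) == 0) ...

   is expanded via inline_string_cmp above into two byte subtractions with
   an early-exit branch instead of a library call.  */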
6955 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6956 represents the size of the first argument to that call, or VOIDmode
6957 if the argument is a pointer. IGNORE will be true if the result
6958 isn't used. */
6959 static rtx
6960 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6961 bool ignore)
6963 rtx val, failsafe;
6964 unsigned nargs = call_expr_nargs (exp);
6966 tree arg0 = CALL_EXPR_ARG (exp, 0);
6968 if (mode == VOIDmode)
6970 mode = TYPE_MODE (TREE_TYPE (arg0));
6971 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6974 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6976 /* An optional second argument can be used as a failsafe value on
6977 some machines. If it isn't present, then the failsafe value is
6978 assumed to be 0. */
6979 if (nargs > 1)
6981 tree arg1 = CALL_EXPR_ARG (exp, 1);
6982 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6984 else
6985 failsafe = const0_rtx;
6987 /* If the result isn't used, the behavior is undefined. It would be
6988 nice to emit a warning here, but path splitting means this might
6989 happen with legitimate code. So simply drop the builtin
6990 expansion in that case; we've handled any side-effects above. */
6991 if (ignore)
6992 return const0_rtx;
6994 /* If we don't have a suitable target, create one to hold the result. */
6995 if (target == NULL || GET_MODE (target) != mode)
6996 target = gen_reg_rtx (mode);
6998 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6999 val = convert_modes (mode, VOIDmode, val, false);
7001 return targetm.speculation_safe_value (mode, target, val, failsafe);
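/* Typical use, clamping an index on a misspeculated path (a sketch;
   array, idx and len are placeholders):

     if (idx < len)
       {
         idx = __builtin_speculation_safe_value (idx, 0);
         x = array[idx];   // idx is forced to 0 under misspeculation
       }
*/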
7004 /* Expand an expression EXP that calls a built-in function,
7005 with result going to TARGET if that's convenient
7006 (and in mode MODE if that's convenient).
7007 SUBTARGET may be used as the target for computing one of EXP's operands.
7008 IGNORE is nonzero if the value is to be ignored. */
7010 rtx
7011 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7012 int ignore)
7014 tree fndecl = get_callee_fndecl (exp);
7015 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7016 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7017 int flags;
7019 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7020 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7022 /* When ASan is enabled, we don't want to expand some memory/string
7023 builtins and rely on libsanitizer's hooks. This allows us to avoid
7024 redundant checks and be sure that a possible overflow will be detected
7025 by ASan. */
7027 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7028 return expand_call (exp, target, ignore);
7030 /* When not optimizing, generate calls to library functions for a certain
7031 set of builtins. */
7032 if (!optimize
7033 && !called_as_built_in (fndecl)
7034 && fcode != BUILT_IN_FORK
7035 && fcode != BUILT_IN_EXECL
7036 && fcode != BUILT_IN_EXECV
7037 && fcode != BUILT_IN_EXECLP
7038 && fcode != BUILT_IN_EXECLE
7039 && fcode != BUILT_IN_EXECVP
7040 && fcode != BUILT_IN_EXECVE
7041 && !ALLOCA_FUNCTION_CODE_P (fcode)
7042 && fcode != BUILT_IN_FREE)
7043 return expand_call (exp, target, ignore);
7045 /* The built-in function expanders test for target == const0_rtx
7046 to determine whether the function's result will be ignored. */
7047 if (ignore)
7048 target = const0_rtx;
7050 /* If the result of a pure or const built-in function is ignored, and
7051 none of its arguments are volatile, we can avoid expanding the
7052 built-in call and just evaluate the arguments for side-effects. */
7053 if (target == const0_rtx
7054 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7055 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7057 bool volatilep = false;
7058 tree arg;
7059 call_expr_arg_iterator iter;
7061 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7062 if (TREE_THIS_VOLATILE (arg))
7064 volatilep = true;
7065 break;
7068 if (! volatilep)
7070 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7071 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7072 return const0_rtx;
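/* Effect sketch (illustrative): given a pure builtin whose result is
   ignored, e.g.

     (void) __builtin_strlen (s);

   no call is emitted at all; only the argument s is evaluated, and
   only for its side-effects.  The volatile check above makes sure
   calls with volatile arguments are still expanded normally. */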
7076 switch (fcode)
7078 CASE_FLT_FN (BUILT_IN_FABS):
7079 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7080 case BUILT_IN_FABSD32:
7081 case BUILT_IN_FABSD64:
7082 case BUILT_IN_FABSD128:
7083 target = expand_builtin_fabs (exp, target, subtarget);
7084 if (target)
7085 return target;
7086 break;
7088 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7089 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7090 target = expand_builtin_copysign (exp, target, subtarget);
7091 if (target)
7092 return target;
7093 break;
7095 /* Just do a normal library call if we were unable to fold
7096 the values. */
7097 CASE_FLT_FN (BUILT_IN_CABS):
7098 break;
7100 CASE_FLT_FN (BUILT_IN_FMA):
7101 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7102 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7103 if (target)
7104 return target;
7105 break;
7107 CASE_FLT_FN (BUILT_IN_ILOGB):
7108 if (! flag_unsafe_math_optimizations)
7109 break;
7110 gcc_fallthrough ();
7111 CASE_FLT_FN (BUILT_IN_ISINF):
7112 CASE_FLT_FN (BUILT_IN_FINITE):
7113 case BUILT_IN_ISFINITE:
7114 case BUILT_IN_ISNORMAL:
7115 target = expand_builtin_interclass_mathfn (exp, target);
7116 if (target)
7117 return target;
7118 break;
7120 CASE_FLT_FN (BUILT_IN_ICEIL):
7121 CASE_FLT_FN (BUILT_IN_LCEIL):
7122 CASE_FLT_FN (BUILT_IN_LLCEIL):
7123 CASE_FLT_FN (BUILT_IN_LFLOOR):
7124 CASE_FLT_FN (BUILT_IN_IFLOOR):
7125 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7126 target = expand_builtin_int_roundingfn (exp, target);
7127 if (target)
7128 return target;
7129 break;
7131 CASE_FLT_FN (BUILT_IN_IRINT):
7132 CASE_FLT_FN (BUILT_IN_LRINT):
7133 CASE_FLT_FN (BUILT_IN_LLRINT):
7134 CASE_FLT_FN (BUILT_IN_IROUND):
7135 CASE_FLT_FN (BUILT_IN_LROUND):
7136 CASE_FLT_FN (BUILT_IN_LLROUND):
7137 target = expand_builtin_int_roundingfn_2 (exp, target);
7138 if (target)
7139 return target;
7140 break;
7142 CASE_FLT_FN (BUILT_IN_POWI):
7143 target = expand_builtin_powi (exp, target);
7144 if (target)
7145 return target;
7146 break;
7148 CASE_FLT_FN (BUILT_IN_CEXPI):
7149 target = expand_builtin_cexpi (exp, target);
7150 gcc_assert (target);
7151 return target;
7153 CASE_FLT_FN (BUILT_IN_SIN):
7154 CASE_FLT_FN (BUILT_IN_COS):
7155 if (! flag_unsafe_math_optimizations)
7156 break;
7157 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7158 if (target)
7159 return target;
7160 break;
7162 CASE_FLT_FN (BUILT_IN_SINCOS):
7163 if (! flag_unsafe_math_optimizations)
7164 break;
7165 target = expand_builtin_sincos (exp);
7166 if (target)
7167 return target;
7168 break;
7170 case BUILT_IN_APPLY_ARGS:
7171 return expand_builtin_apply_args ();
7173 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7174 FUNCTION with a copy of the parameters described by
7175 ARGUMENTS, and ARGSIZE. It returns a block of memory
7176 allocated on the stack into which are stored all the registers
7177 that might possibly be used for returning the result of a
7178 function. ARGUMENTS is the value returned by
7179 __builtin_apply_args. ARGSIZE is the number of bytes of
7180 arguments that must be copied. ??? How should this value be
7181 computed? We'll also need a safe worst case value for varargs
7182 functions. */
7183 case BUILT_IN_APPLY:
7184 if (!validate_arglist (exp, POINTER_TYPE,
7185 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7186 && !validate_arglist (exp, REFERENCE_TYPE,
7187 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7188 return const0_rtx;
7189 else
7191 rtx ops[3];
7193 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7194 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7195 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7197 return expand_builtin_apply (ops[0], ops[1], ops[2]);
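/* A hedged usage sketch (hypothetical wrapper, not from this file):
   forward the current arguments to another function and return its
   result.  Here target_fn and the 64-byte argument-block size are
   assumptions; as noted above, a safe ARGSIZE is target-dependent.

     void *
     forward_call (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }
*/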
7200 /* __builtin_return (RESULT) causes the function to return the
7201 value described by RESULT. RESULT is address of the block of
7202 memory returned by __builtin_apply. */
7203 case BUILT_IN_RETURN:
7204 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7205 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7206 return const0_rtx;
7208 case BUILT_IN_SAVEREGS:
7209 return expand_builtin_saveregs ();
7211 case BUILT_IN_VA_ARG_PACK:
7212 /* All valid uses of __builtin_va_arg_pack () are removed during
7213 inlining. */
7214 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7215 return const0_rtx;
7217 case BUILT_IN_VA_ARG_PACK_LEN:
7218 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7219 inlining. */
7220 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7221 return const0_rtx;
7223 /* Return the address of the first anonymous stack arg. */
7224 case BUILT_IN_NEXT_ARG:
7225 if (fold_builtin_next_arg (exp, false))
7226 return const0_rtx;
7227 return expand_builtin_next_arg ();
7229 case BUILT_IN_CLEAR_CACHE:
7230 target = expand_builtin___clear_cache (exp);
7231 if (target)
7232 return target;
7233 break;
7235 case BUILT_IN_CLASSIFY_TYPE:
7236 return expand_builtin_classify_type (exp);
7238 case BUILT_IN_CONSTANT_P:
7239 return const0_rtx;
7241 case BUILT_IN_FRAME_ADDRESS:
7242 case BUILT_IN_RETURN_ADDRESS:
7243 return expand_builtin_frame_address (fndecl, exp);
7245 /* Returns the address of the area where the structure is returned.
7246 0 otherwise. */
7247 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7248 if (call_expr_nargs (exp) != 0
7249 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7250 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7251 return const0_rtx;
7252 else
7253 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7255 CASE_BUILT_IN_ALLOCA:
7256 target = expand_builtin_alloca (exp);
7257 if (target)
7258 return target;
7259 break;
7261 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7262 return expand_asan_emit_allocas_unpoison (exp);
7264 case BUILT_IN_STACK_SAVE:
7265 return expand_stack_save ();
7267 case BUILT_IN_STACK_RESTORE:
7268 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7269 return const0_rtx;
7271 case BUILT_IN_BSWAP16:
7272 case BUILT_IN_BSWAP32:
7273 case BUILT_IN_BSWAP64:
7274 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7275 if (target)
7276 return target;
7277 break;
7279 CASE_INT_FN (BUILT_IN_FFS):
7280 target = expand_builtin_unop (target_mode, exp, target,
7281 subtarget, ffs_optab);
7282 if (target)
7283 return target;
7284 break;
7286 CASE_INT_FN (BUILT_IN_CLZ):
7287 target = expand_builtin_unop (target_mode, exp, target,
7288 subtarget, clz_optab);
7289 if (target)
7290 return target;
7291 break;
7293 CASE_INT_FN (BUILT_IN_CTZ):
7294 target = expand_builtin_unop (target_mode, exp, target,
7295 subtarget, ctz_optab);
7296 if (target)
7297 return target;
7298 break;
7300 CASE_INT_FN (BUILT_IN_CLRSB):
7301 target = expand_builtin_unop (target_mode, exp, target,
7302 subtarget, clrsb_optab);
7303 if (target)
7304 return target;
7305 break;
7307 CASE_INT_FN (BUILT_IN_POPCOUNT):
7308 target = expand_builtin_unop (target_mode, exp, target,
7309 subtarget, popcount_optab);
7310 if (target)
7311 return target;
7312 break;
7314 CASE_INT_FN (BUILT_IN_PARITY):
7315 target = expand_builtin_unop (target_mode, exp, target,
7316 subtarget, parity_optab);
7317 if (target)
7318 return target;
7319 break;
7321 case BUILT_IN_STRLEN:
7322 target = expand_builtin_strlen (exp, target, target_mode);
7323 if (target)
7324 return target;
7325 break;
7327 case BUILT_IN_STRNLEN:
7328 target = expand_builtin_strnlen (exp, target, target_mode);
7329 if (target)
7330 return target;
7331 break;
7333 case BUILT_IN_STRCAT:
7334 target = expand_builtin_strcat (exp, target);
7335 if (target)
7336 return target;
7337 break;
7339 case BUILT_IN_STRCPY:
7340 target = expand_builtin_strcpy (exp, target);
7341 if (target)
7342 return target;
7343 break;
7345 case BUILT_IN_STRNCAT:
7346 target = expand_builtin_strncat (exp, target);
7347 if (target)
7348 return target;
7349 break;
7351 case BUILT_IN_STRNCPY:
7352 target = expand_builtin_strncpy (exp, target);
7353 if (target)
7354 return target;
7355 break;
7357 case BUILT_IN_STPCPY:
7358 target = expand_builtin_stpcpy (exp, target, mode);
7359 if (target)
7360 return target;
7361 break;
7363 case BUILT_IN_STPNCPY:
7364 target = expand_builtin_stpncpy (exp, target);
7365 if (target)
7366 return target;
7367 break;
7369 case BUILT_IN_MEMCHR:
7370 target = expand_builtin_memchr (exp, target);
7371 if (target)
7372 return target;
7373 break;
7375 case BUILT_IN_MEMCPY:
7376 target = expand_builtin_memcpy (exp, target);
7377 if (target)
7378 return target;
7379 break;
7381 case BUILT_IN_MEMMOVE:
7382 target = expand_builtin_memmove (exp, target);
7383 if (target)
7384 return target;
7385 break;
7387 case BUILT_IN_MEMPCPY:
7388 target = expand_builtin_mempcpy (exp, target);
7389 if (target)
7390 return target;
7391 break;
7393 case BUILT_IN_MEMSET:
7394 target = expand_builtin_memset (exp, target, mode);
7395 if (target)
7396 return target;
7397 break;
7399 case BUILT_IN_BZERO:
7400 target = expand_builtin_bzero (exp);
7401 if (target)
7402 return target;
7403 break;
7405 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7406 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7407 when changing it to a strcmp call. */
7408 case BUILT_IN_STRCMP_EQ:
7409 target = expand_builtin_memcmp (exp, target, true);
7410 if (target)
7411 return target;
7413 /* Change this call back to a BUILT_IN_STRCMP. */
7414 TREE_OPERAND (exp, 1)
7415 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7417 /* Delete the last parameter. */
7418 unsigned int i;
7419 vec<tree, va_gc> *arg_vec;
7420 vec_alloc (arg_vec, 2);
7421 for (i = 0; i < 2; i++)
7422 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7423 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7424 /* FALLTHROUGH */
7426 case BUILT_IN_STRCMP:
7427 target = expand_builtin_strcmp (exp, target);
7428 if (target)
7429 return target;
7430 break;
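/* Background sketch (an assumption about how the _EQ variant arises;
   it is not created in this file): earlier passes may turn a bounded
   equality test such as

     if (strcmp (a, b) == 0)

   into BUILT_IN_STRCMP_EQ with a third, length-like argument so that
   it can be expanded as a fixed-size memcmp; when that expansion
   fails, the code above rebuilds a plain two-argument strcmp call. */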
7432 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7433 back to a BUILT_IN_STRNCMP. */
7434 case BUILT_IN_STRNCMP_EQ:
7435 target = expand_builtin_memcmp (exp, target, true);
7436 if (target)
7437 return target;
7439 /* Change it back to a BUILT_IN_STRNCMP. */
7440 TREE_OPERAND (exp, 1)
7441 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7442 /* FALLTHROUGH */
7444 case BUILT_IN_STRNCMP:
7445 target = expand_builtin_strncmp (exp, target, mode);
7446 if (target)
7447 return target;
7448 break;
7450 case BUILT_IN_BCMP:
7451 case BUILT_IN_MEMCMP:
7452 case BUILT_IN_MEMCMP_EQ:
7453 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7454 if (target)
7455 return target;
7456 if (fcode == BUILT_IN_MEMCMP_EQ)
7458 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7459 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7461 break;
7463 case BUILT_IN_SETJMP:
7464 /* This should have been lowered to the builtins below. */
7465 gcc_unreachable ();
7467 case BUILT_IN_SETJMP_SETUP:
7468 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7469 and the receiver label. */
7470 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7472 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7473 VOIDmode, EXPAND_NORMAL);
7474 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7475 rtx_insn *label_r = label_rtx (label);
7477 /* This is copied from the handling of non-local gotos. */
7478 expand_builtin_setjmp_setup (buf_addr, label_r);
7479 nonlocal_goto_handler_labels
7480 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7481 nonlocal_goto_handler_labels);
7482 /* ??? Do not let expand_label treat us as such since we would
7483 not want to be both on the list of non-local labels and on
7484 the list of forced labels. */
7485 FORCED_LABEL (label) = 0;
7486 return const0_rtx;
7488 break;
7490 case BUILT_IN_SETJMP_RECEIVER:
7491 /* __builtin_setjmp_receiver is passed the receiver label. */
7492 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7494 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7495 rtx_insn *label_r = label_rtx (label);
7497 expand_builtin_setjmp_receiver (label_r);
7498 return const0_rtx;
7500 break;
7502 /* __builtin_longjmp is passed a pointer to an array of five words.
7503 It's similar to the C library longjmp function but works with
7504 __builtin_setjmp above. */
7505 case BUILT_IN_LONGJMP:
7506 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7508 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7509 VOIDmode, EXPAND_NORMAL);
7510 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7512 if (value != const1_rtx)
7514 error ("%<__builtin_longjmp%> second argument must be 1");
7515 return const0_rtx;
7518 expand_builtin_longjmp (buf_addr, value);
7519 return const0_rtx;
7521 break;
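/* A minimal pairing sketch (user level, illustrative):
   __builtin_longjmp always transfers the value 1, which is why any
   other constant is rejected above.

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_work ();   // direct path; do_work is a hypothetical callee
     else
       recover ();   // reached via __builtin_longjmp (buf, 1)
*/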
7523 case BUILT_IN_NONLOCAL_GOTO:
7524 target = expand_builtin_nonlocal_goto (exp);
7525 if (target)
7526 return target;
7527 break;
7529 /* This updates the setjmp buffer that is its argument with the value
7530 of the current stack pointer. */
7531 case BUILT_IN_UPDATE_SETJMP_BUF:
7532 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7534 rtx buf_addr
7535 = expand_normal (CALL_EXPR_ARG (exp, 0));
7537 expand_builtin_update_setjmp_buf (buf_addr);
7538 return const0_rtx;
7540 break;
7542 case BUILT_IN_TRAP:
7543 expand_builtin_trap ();
7544 return const0_rtx;
7546 case BUILT_IN_UNREACHABLE:
7547 expand_builtin_unreachable ();
7548 return const0_rtx;
7550 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7551 case BUILT_IN_SIGNBITD32:
7552 case BUILT_IN_SIGNBITD64:
7553 case BUILT_IN_SIGNBITD128:
7554 target = expand_builtin_signbit (exp, target);
7555 if (target)
7556 return target;
7557 break;
7559 /* Various hooks for the DWARF 2 __throw routine. */
7560 case BUILT_IN_UNWIND_INIT:
7561 expand_builtin_unwind_init ();
7562 return const0_rtx;
7563 case BUILT_IN_DWARF_CFA:
7564 return virtual_cfa_rtx;
7565 #ifdef DWARF2_UNWIND_INFO
7566 case BUILT_IN_DWARF_SP_COLUMN:
7567 return expand_builtin_dwarf_sp_column ();
7568 case BUILT_IN_INIT_DWARF_REG_SIZES:
7569 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7570 return const0_rtx;
7571 #endif
7572 case BUILT_IN_FROB_RETURN_ADDR:
7573 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7574 case BUILT_IN_EXTRACT_RETURN_ADDR:
7575 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7576 case BUILT_IN_EH_RETURN:
7577 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7578 CALL_EXPR_ARG (exp, 1));
7579 return const0_rtx;
7580 case BUILT_IN_EH_RETURN_DATA_REGNO:
7581 return expand_builtin_eh_return_data_regno (exp);
7582 case BUILT_IN_EXTEND_POINTER:
7583 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7584 case BUILT_IN_EH_POINTER:
7585 return expand_builtin_eh_pointer (exp);
7586 case BUILT_IN_EH_FILTER:
7587 return expand_builtin_eh_filter (exp);
7588 case BUILT_IN_EH_COPY_VALUES:
7589 return expand_builtin_eh_copy_values (exp);
7591 case BUILT_IN_VA_START:
7592 return expand_builtin_va_start (exp);
7593 case BUILT_IN_VA_END:
7594 return expand_builtin_va_end (exp);
7595 case BUILT_IN_VA_COPY:
7596 return expand_builtin_va_copy (exp);
7597 case BUILT_IN_EXPECT:
7598 return expand_builtin_expect (exp, target);
7599 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7600 return expand_builtin_expect_with_probability (exp, target);
7601 case BUILT_IN_ASSUME_ALIGNED:
7602 return expand_builtin_assume_aligned (exp, target);
7603 case BUILT_IN_PREFETCH:
7604 expand_builtin_prefetch (exp);
7605 return const0_rtx;
7607 case BUILT_IN_INIT_TRAMPOLINE:
7608 return expand_builtin_init_trampoline (exp, true);
7609 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7610 return expand_builtin_init_trampoline (exp, false);
7611 case BUILT_IN_ADJUST_TRAMPOLINE:
7612 return expand_builtin_adjust_trampoline (exp);
7614 case BUILT_IN_INIT_DESCRIPTOR:
7615 return expand_builtin_init_descriptor (exp);
7616 case BUILT_IN_ADJUST_DESCRIPTOR:
7617 return expand_builtin_adjust_descriptor (exp);
7619 case BUILT_IN_FORK:
7620 case BUILT_IN_EXECL:
7621 case BUILT_IN_EXECV:
7622 case BUILT_IN_EXECLP:
7623 case BUILT_IN_EXECLE:
7624 case BUILT_IN_EXECVP:
7625 case BUILT_IN_EXECVE:
7626 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7627 if (target)
7628 return target;
7629 break;
7631 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7632 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7633 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7634 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7635 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7636 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7637 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7638 if (target)
7639 return target;
7640 break;
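/* Usage sketch (illustrative): the _1 through _16 variants are
   selected by operand size, so a 4-byte counter maps to the ..._4
   case above and returns the pre-increment value.

     int counter;
     int old = __sync_fetch_and_add (&counter, 1);
*/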
7642 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7643 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7644 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7645 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7646 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7647 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7648 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7649 if (target)
7650 return target;
7651 break;
7653 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7654 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7655 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7656 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7657 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7658 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7659 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7660 if (target)
7661 return target;
7662 break;
7664 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7665 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7666 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7667 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7668 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7669 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7670 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7671 if (target)
7672 return target;
7673 break;
7675 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7676 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7677 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7678 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7679 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7680 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7681 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7682 if (target)
7683 return target;
7684 break;
7686 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7687 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7688 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7689 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7690 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7691 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7692 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7693 if (target)
7694 return target;
7695 break;
7697 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7698 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7699 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7700 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7701 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7702 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7703 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7704 if (target)
7705 return target;
7706 break;
7708 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7709 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7710 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7711 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7712 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7713 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7714 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7715 if (target)
7716 return target;
7717 break;
7719 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7720 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7721 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7722 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7723 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7724 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7725 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7726 if (target)
7727 return target;
7728 break;
7730 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7731 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7732 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7733 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7734 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7735 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7736 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7737 if (target)
7738 return target;
7739 break;
7741 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7742 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7743 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7744 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7745 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7746 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7747 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7748 if (target)
7749 return target;
7750 break;
7752 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7753 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7754 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7755 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7756 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7757 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7758 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7759 if (target)
7760 return target;
7761 break;
7763 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7764 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7765 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7766 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7767 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7768 if (mode == VOIDmode)
7769 mode = TYPE_MODE (boolean_type_node);
7770 if (!target || !register_operand (target, mode))
7771 target = gen_reg_rtx (mode);
7773 mode = get_builtin_sync_mode
7774 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7775 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7776 if (target)
7777 return target;
7778 break;
7780 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7781 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7782 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7783 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7784 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7785 mode = get_builtin_sync_mode
7786 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7787 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7788 if (target)
7789 return target;
7790 break;
7792 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7793 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7794 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7795 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7796 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7797 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7798 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7799 if (target)
7800 return target;
7801 break;
7803 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7804 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7805 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7806 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7807 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7808 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7809 expand_builtin_sync_lock_release (mode, exp);
7810 return const0_rtx;
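/* Usage sketch (hypothetical spinlock): the two builtins above pair
   as an acquire/release on a simple lock word.

     static volatile int lock;

     while (__sync_lock_test_and_set (&lock, 1))  // acquire
       ;
     // ...critical section...
     __sync_lock_release (&lock);                 // release
*/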
7812 case BUILT_IN_SYNC_SYNCHRONIZE:
7813 expand_builtin_sync_synchronize ();
7814 return const0_rtx;
7816 case BUILT_IN_ATOMIC_EXCHANGE_1:
7817 case BUILT_IN_ATOMIC_EXCHANGE_2:
7818 case BUILT_IN_ATOMIC_EXCHANGE_4:
7819 case BUILT_IN_ATOMIC_EXCHANGE_8:
7820 case BUILT_IN_ATOMIC_EXCHANGE_16:
7821 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7822 target = expand_builtin_atomic_exchange (mode, exp, target);
7823 if (target)
7824 return target;
7825 break;
7827 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7828 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7829 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7830 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7831 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7833 unsigned int nargs, z;
7834 vec<tree, va_gc> *vec;
7836 mode =
7837 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7838 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7839 if (target)
7840 return target;
7842 /* If this is turned into an external library call, the weak parameter
7843 must be dropped to match the expected parameter list. */
7844 nargs = call_expr_nargs (exp);
7845 vec_alloc (vec, nargs - 1);
7846 for (z = 0; z < 3; z++)
7847 vec->quick_push (CALL_EXPR_ARG (exp, z));
7848 /* Skip the boolean weak parameter. */
7849 for (z = 4; z < 6; z++)
7850 vec->quick_push (CALL_EXPR_ARG (exp, z));
7851 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7852 break;
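/* Source-level sketch (illustrative): the weak flag is the fourth
   argument below, i.e. the one dropped above when falling back to the
   five-argument __atomic_compare_exchange_N library routine.

     int v, expected = 0;
     bool ok = __atomic_compare_exchange_n (&v, &expected, 1, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   where false is the weak flag. */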
7855 case BUILT_IN_ATOMIC_LOAD_1:
7856 case BUILT_IN_ATOMIC_LOAD_2:
7857 case BUILT_IN_ATOMIC_LOAD_4:
7858 case BUILT_IN_ATOMIC_LOAD_8:
7859 case BUILT_IN_ATOMIC_LOAD_16:
7860 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7861 target = expand_builtin_atomic_load (mode, exp, target);
7862 if (target)
7863 return target;
7864 break;
7866 case BUILT_IN_ATOMIC_STORE_1:
7867 case BUILT_IN_ATOMIC_STORE_2:
7868 case BUILT_IN_ATOMIC_STORE_4:
7869 case BUILT_IN_ATOMIC_STORE_8:
7870 case BUILT_IN_ATOMIC_STORE_16:
7871 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7872 target = expand_builtin_atomic_store (mode, exp);
7873 if (target)
7874 return const0_rtx;
7875 break;
7877 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7878 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7879 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7880 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7881 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7883 enum built_in_function lib;
7884 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7885 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7886 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7887 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7888 ignore, lib);
7889 if (target)
7890 return target;
7891 break;
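/* The library fallback leans on the identity (sketch):

     __atomic_add_fetch (p, v, order)
       == __atomic_fetch_add (p, v, order) + v

   so an ADD_FETCH builtin can be emitted as the corresponding
   FETCH_ADD library call with the operation re-applied to the result;
   the SUB/AND/NAND/XOR/OR cases below follow the same pattern. */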
7893 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7894 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7895 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7896 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7897 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7899 enum built_in_function lib;
7900 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7901 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7902 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7903 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7904 ignore, lib);
7905 if (target)
7906 return target;
7907 break;
7909 case BUILT_IN_ATOMIC_AND_FETCH_1:
7910 case BUILT_IN_ATOMIC_AND_FETCH_2:
7911 case BUILT_IN_ATOMIC_AND_FETCH_4:
7912 case BUILT_IN_ATOMIC_AND_FETCH_8:
7913 case BUILT_IN_ATOMIC_AND_FETCH_16:
7915 enum built_in_function lib;
7916 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7917 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7918 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7919 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7920 ignore, lib);
7921 if (target)
7922 return target;
7923 break;
7925 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7926 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7927 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7928 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7929 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7931 enum built_in_function lib;
7932 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7933 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7934 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7935 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7936 ignore, lib);
7937 if (target)
7938 return target;
7939 break;
7941 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7942 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7943 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7944 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7945 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7947 enum built_in_function lib;
7948 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7949 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7950 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7951 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7952 ignore, lib);
7953 if (target)
7954 return target;
7955 break;
7957 case BUILT_IN_ATOMIC_OR_FETCH_1:
7958 case BUILT_IN_ATOMIC_OR_FETCH_2:
7959 case BUILT_IN_ATOMIC_OR_FETCH_4:
7960 case BUILT_IN_ATOMIC_OR_FETCH_8:
7961 case BUILT_IN_ATOMIC_OR_FETCH_16:
7963 enum built_in_function lib;
7964 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7965 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7966 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7967 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7968 ignore, lib);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7974 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7975 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7976 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7977 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7978 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7979 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7980 ignore, BUILT_IN_NONE);
7981 if (target)
7982 return target;
7983 break;
7985 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7986 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7987 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7988 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7989 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7990 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7991 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7992 ignore, BUILT_IN_NONE);
7993 if (target)
7994 return target;
7995 break;
7997 case BUILT_IN_ATOMIC_FETCH_AND_1:
7998 case BUILT_IN_ATOMIC_FETCH_AND_2:
7999 case BUILT_IN_ATOMIC_FETCH_AND_4:
8000 case BUILT_IN_ATOMIC_FETCH_AND_8:
8001 case BUILT_IN_ATOMIC_FETCH_AND_16:
8002 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8003 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8004 ignore, BUILT_IN_NONE);
8005 if (target)
8006 return target;
8007 break;
8009 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8010 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8011 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8012 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8013 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8014 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8015 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8016 ignore, BUILT_IN_NONE);
8017 if (target)
8018 return target;
8019 break;
8021 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8022 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8023 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8024 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8025 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8026 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8027 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8028 ignore, BUILT_IN_NONE);
8029 if (target)
8030 return target;
8031 break;
8033 case BUILT_IN_ATOMIC_FETCH_OR_1:
8034 case BUILT_IN_ATOMIC_FETCH_OR_2:
8035 case BUILT_IN_ATOMIC_FETCH_OR_4:
8036 case BUILT_IN_ATOMIC_FETCH_OR_8:
8037 case BUILT_IN_ATOMIC_FETCH_OR_16:
8038 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8039 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8040 ignore, BUILT_IN_NONE);
8041 if (target)
8042 return target;
8043 break;
8045 case BUILT_IN_ATOMIC_TEST_AND_SET:
8046 return expand_builtin_atomic_test_and_set (exp, target);
8048 case BUILT_IN_ATOMIC_CLEAR:
8049 return expand_builtin_atomic_clear (exp);
8051 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8052 return expand_builtin_atomic_always_lock_free (exp);
8054 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8055 target = expand_builtin_atomic_is_lock_free (exp);
8056 if (target)
8057 return target;
8058 break;
8060 case BUILT_IN_ATOMIC_THREAD_FENCE:
8061 expand_builtin_atomic_thread_fence (exp);
8062 return const0_rtx;
8064 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8065 expand_builtin_atomic_signal_fence (exp);
8066 return const0_rtx;
8068 case BUILT_IN_OBJECT_SIZE:
8069 return expand_builtin_object_size (exp);
8071 case BUILT_IN_MEMCPY_CHK:
8072 case BUILT_IN_MEMPCPY_CHK:
8073 case BUILT_IN_MEMMOVE_CHK:
8074 case BUILT_IN_MEMSET_CHK:
8075 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8076 if (target)
8077 return target;
8078 break;
8080 case BUILT_IN_STRCPY_CHK:
8081 case BUILT_IN_STPCPY_CHK:
8082 case BUILT_IN_STRNCPY_CHK:
8083 case BUILT_IN_STPNCPY_CHK:
8084 case BUILT_IN_STRCAT_CHK:
8085 case BUILT_IN_STRNCAT_CHK:
8086 case BUILT_IN_SNPRINTF_CHK:
8087 case BUILT_IN_VSNPRINTF_CHK:
8088 maybe_emit_chk_warning (exp, fcode);
8089 break;
8091 case BUILT_IN_SPRINTF_CHK:
8092 case BUILT_IN_VSPRINTF_CHK:
8093 maybe_emit_sprintf_chk_warning (exp, fcode);
8094 break;
8096 case BUILT_IN_FREE:
8097 if (warn_free_nonheap_object)
8098 maybe_emit_free_warning (exp);
8099 break;
8101 case BUILT_IN_THREAD_POINTER:
8102 return expand_builtin_thread_pointer (exp, target);
8104 case BUILT_IN_SET_THREAD_POINTER:
8105 expand_builtin_set_thread_pointer (exp);
8106 return const0_rtx;
8108 case BUILT_IN_ACC_ON_DEVICE:
8109 /* Do a library call if we failed to expand the builtin when
8110 folding. */
8111 break;
8113 case BUILT_IN_GOACC_PARLEVEL_ID:
8114 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8115 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8117 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8118 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8120 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8121 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8122 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8123 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8124 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8125 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8126 return expand_speculation_safe_value (mode, exp, target, ignore);
8128 default: /* Just do a library call if the builtin is unknown. */
8129 break;
8132 /* The switch statement above can drop through to cause the function
8133 to be called normally. */
8134 return expand_call (exp, target, ignore);
8137 /* Determine whether a tree node represents a call to a built-in
8138 function. If the tree T is a call to a built-in function with
8139 the right number of arguments of the appropriate types, return
8140 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8141 Otherwise the return value is END_BUILTINS. */
8143 enum built_in_function
8144 builtin_mathfn_code (const_tree t)
8146 const_tree fndecl, arg, parmlist;
8147 const_tree argtype, parmtype;
8148 const_call_expr_arg_iterator iter;
8150 if (TREE_CODE (t) != CALL_EXPR)
8151 return END_BUILTINS;
8153 fndecl = get_callee_fndecl (t);
8154 if (fndecl == NULL_TREE
8155 || TREE_CODE (fndecl) != FUNCTION_DECL
8156 || ! DECL_BUILT_IN (fndecl)
8157 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8158 return END_BUILTINS;
8160 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8161 init_const_call_expr_arg_iterator (t, &iter);
8162 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8164 /* If a function doesn't take a variable number of arguments,
8165 the last element in the list will have type `void'. */
8166 parmtype = TREE_VALUE (parmlist);
8167 if (VOID_TYPE_P (parmtype))
8169 if (more_const_call_expr_args_p (&iter))
8170 return END_BUILTINS;
8171 return DECL_FUNCTION_CODE (fndecl);
8174 if (! more_const_call_expr_args_p (&iter))
8175 return END_BUILTINS;
8177 arg = next_const_call_expr_arg (&iter);
8178 argtype = TREE_TYPE (arg);
8180 if (SCALAR_FLOAT_TYPE_P (parmtype))
8182 if (! SCALAR_FLOAT_TYPE_P (argtype))
8183 return END_BUILTINS;
8185 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8187 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8188 return END_BUILTINS;
8190 else if (POINTER_TYPE_P (parmtype))
8192 if (! POINTER_TYPE_P (argtype))
8193 return END_BUILTINS;
8195 else if (INTEGRAL_TYPE_P (parmtype))
8197 if (! INTEGRAL_TYPE_P (argtype))
8198 return END_BUILTINS;
8200 else
8201 return END_BUILTINS;
8204 /* Variable-length argument list. */
8205 return DECL_FUNCTION_CODE (fndecl);
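/* Caller sketch (illustrative): a typical use pattern-matches math
   calls before rewriting them, e.g.

     if (builtin_mathfn_code (expr) == BUILT_IN_SQRT)
       ...  // expr is a well-formed call to the sqrt builtin

   relying on the argument/parameter type agreement verified above. */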
8208 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8209 evaluate to a constant. */
8211 static tree
8212 fold_builtin_constant_p (tree arg)
8214 /* We return 1 for a numeric type that's known to be a constant
8215 value at compile-time or for an aggregate type that's a
8216 literal constant. */
8217 STRIP_NOPS (arg);
8219 /* If we know this is a constant, return the constant one. */
8220 if (CONSTANT_CLASS_P (arg)
8221 || (TREE_CODE (arg) == CONSTRUCTOR
8222 && TREE_CONSTANT (arg)))
8223 return integer_one_node;
8224 if (TREE_CODE (arg) == ADDR_EXPR)
8226 tree op = TREE_OPERAND (arg, 0);
8227 if (TREE_CODE (op) == STRING_CST
8228 || (TREE_CODE (op) == ARRAY_REF
8229 && integer_zerop (TREE_OPERAND (op, 1))
8230 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8231 return integer_one_node;
8234 /* If this expression has side effects, show we don't know it to be a
8235 constant. Likewise if it's a pointer or aggregate type, since in
8236 those cases we only want literals, which are only optimized
8237 when generating RTL, not later.
8238 And finally, if we are compiling an initializer, not code, we
8239 need to return a definite result now; there's not going to be any
8240 more optimization done. */
8241 if (TREE_SIDE_EFFECTS (arg)
8242 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8243 || POINTER_TYPE_P (TREE_TYPE (arg))
8244 || cfun == 0
8245 || folding_initializer
8246 || force_folding_builtin_constant_p)
8247 return integer_zero_node;
8249 return NULL_TREE;
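/* Folding sketch (illustrative):

     __builtin_constant_p (3)      // folds to 1 (a constant-class node)
     __builtin_constant_p ("abc")  // folds to 1 (address of a string)
     __builtin_constant_p (x++)    // folds to 0 (side-effects)
     __builtin_constant_p (x)      // NULL_TREE: decided later

   The NULL_TREE case defers the answer so later optimization can
   still prove the argument constant. */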
8252 /* Create builtin_expect or builtin_expect_with_probability
8253 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8254 The Fortran FE can also produce builtin_expect with PREDICTOR as the
8255 third argument; builtin_expect_with_probability instead uses its third
8256 argument as a PROBABILITY value. */
8258 static tree
8259 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8260 tree predictor, tree probability)
8262 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8264 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8265 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8266 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8267 ret_type = TREE_TYPE (TREE_TYPE (fn));
8268 pred_type = TREE_VALUE (arg_types);
8269 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8271 pred = fold_convert_loc (loc, pred_type, pred);
8272 expected = fold_convert_loc (loc, expected_type, expected);
8274 if (probability)
8275 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8276 else
8277 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8278 predictor);
8280 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8281 build_int_cst (ret_type, 0));
8284 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8285 NULL_TREE if no simplification is possible. */
8287 tree
8288 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8289 tree arg3)
8291 tree inner, fndecl, inner_arg0;
8292 enum tree_code code;
8294 /* Distribute the expected value over short-circuiting operators.
8295 See through the cast from truthvalue_type_node to long. */
8296 inner_arg0 = arg0;
8297 while (CONVERT_EXPR_P (inner_arg0)
8298 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8299 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8300 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8302 /* If this is a builtin_expect within a builtin_expect, keep the
8303 inner one. See through a comparison against a constant. It
8304 might have been added to create a truthvalue. */
8305 inner = inner_arg0;
8307 if (COMPARISON_CLASS_P (inner)
8308 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8309 inner = TREE_OPERAND (inner, 0);
8311 if (TREE_CODE (inner) == CALL_EXPR
8312 && (fndecl = get_callee_fndecl (inner))
8313 && (DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL, BUILT_IN_EXPECT)
8314 || DECL_BUILT_IN_P (fndecl, BUILT_IN_NORMAL,
8315 BUILT_IN_EXPECT_WITH_PROBABILITY)))
8316 return arg0;
8318 inner = inner_arg0;
8319 code = TREE_CODE (inner);
8320 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8322 tree op0 = TREE_OPERAND (inner, 0);
8323 tree op1 = TREE_OPERAND (inner, 1);
8324 arg1 = save_expr (arg1);
8326 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8327 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8328 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8330 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
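/* Effect sketch (illustrative): for a short-circuit condition the
   hint is distributed, so roughly

     __builtin_expect (a && b, 1)

   becomes

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   letting each half of the condition carry the expectation. */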
8333 /* If the argument isn't invariant then there's nothing else we can do. */
8334 if (!TREE_CONSTANT (inner_arg0))
8335 return NULL_TREE;
8337 /* If we expect that a comparison against the argument will fold to
8338 a constant return the constant. In practice, this means a true
8339 constant or the address of a non-weak symbol. */
8340 inner = inner_arg0;
8341 STRIP_NOPS (inner);
8342 if (TREE_CODE (inner) == ADDR_EXPR)
8346 inner = TREE_OPERAND (inner, 0);
8348 while (TREE_CODE (inner) == COMPONENT_REF
8349 || TREE_CODE (inner) == ARRAY_REF);
8350 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8351 return NULL_TREE;
8354 /* Otherwise, ARG0 already has the proper type for the return value. */
8355 return arg0;
8358 /* Fold a call to __builtin_classify_type with argument ARG. */
8360 static tree
8361 fold_builtin_classify_type (tree arg)
8363 if (arg == 0)
8364 return build_int_cst (integer_type_node, no_type_class);
8366 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8369 /* Fold a call to __builtin_strlen with argument ARG. */
8371 static tree
8372 fold_builtin_strlen (location_t loc, tree type, tree arg)
8374 if (!validate_arg (arg, POINTER_TYPE))
8375 return NULL_TREE;
8376 else
8378 tree len = c_strlen (arg, 0);
8380 if (len)
8381 return fold_convert_loc (loc, type, len);
8383 return NULL_TREE;
8387 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8389 static tree
8390 fold_builtin_inf (location_t loc, tree type, int warn)
8392 REAL_VALUE_TYPE real;
8394 /* __builtin_inff is intended to be usable to define INFINITY on all
8395 targets. If an infinity is not available, INFINITY expands "to a
8396 positive constant of type float that overflows at translation
8397 time", footnote "In this case, using INFINITY will violate the
8398 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8399 Thus we pedwarn to ensure this constraint violation is
8400 diagnosed. */
8401 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8402 pedwarn (loc, 0, "target format does not support infinity");
8404 real_inf (&real);
8405 return build_real (type, real);
8408 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8409 NULL_TREE if no simplification can be made. */
8411 static tree
8412 fold_builtin_sincos (location_t loc,
8413 tree arg0, tree arg1, tree arg2)
8415 tree type;
8416 tree fndecl, call = NULL_TREE;
8418 if (!validate_arg (arg0, REAL_TYPE)
8419 || !validate_arg (arg1, POINTER_TYPE)
8420 || !validate_arg (arg2, POINTER_TYPE))
8421 return NULL_TREE;
8423 type = TREE_TYPE (arg0);
8425 /* Calculate the result when the argument is a constant. */
8426 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8427 if (fn == END_BUILTINS)
8428 return NULL_TREE;
8430 /* Canonicalize sincos to cexpi. */
8431 if (TREE_CODE (arg0) == REAL_CST)
8433 tree complex_type = build_complex_type (type);
8434 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8436 if (!call)
8438 if (!targetm.libc_has_function (function_c99_math_complex)
8439 || !builtin_decl_implicit_p (fn))
8440 return NULL_TREE;
8441 fndecl = builtin_decl_explicit (fn);
8442 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8443 call = builtin_save_expr (call);
8446 tree ptype = build_pointer_type (type);
8447 arg1 = fold_convert (ptype, arg1);
8448 arg2 = fold_convert (ptype, arg2);
8449 return build2 (COMPOUND_EXPR, void_type_node,
8450 build2 (MODIFY_EXPR, void_type_node,
8451 build_fold_indirect_ref_loc (loc, arg1),
8452 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8453 build2 (MODIFY_EXPR, void_type_node,
8454 build_fold_indirect_ref_loc (loc, arg2),
8455 fold_build1_loc (loc, REALPART_EXPR, type, call)));
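/* Effect sketch (illustrative): sincos (x, sp, cp) is canonicalized
   to roughly

     tmp = cexpi (x);
     *sp = __imag tmp;
     *cp = __real tmp;

   assuming the C99 complex runtime is available, as checked above. */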
8458 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8459 Return NULL_TREE if no simplification can be made. */
8461 static tree
8462 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8464 if (!validate_arg (arg1, POINTER_TYPE)
8465 || !validate_arg (arg2, POINTER_TYPE)
8466 || !validate_arg (len, INTEGER_TYPE))
8467 return NULL_TREE;
8469 /* If the LEN parameter is zero, return zero. */
8470 if (integer_zerop (len))
8471 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8472 arg1, arg2);
8474 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8475 if (operand_equal_p (arg1, arg2, 0))
8476 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8478 /* If the LEN parameter is one, return an expression corresponding to
8479 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8480 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8482 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8483 tree cst_uchar_ptr_node
8484 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8486 tree ind1
8487 = fold_convert_loc (loc, integer_type_node,
8488 build1 (INDIRECT_REF, cst_uchar_node,
8489 fold_convert_loc (loc,
8490 cst_uchar_ptr_node,
8491 arg1)));
8492 tree ind2
8493 = fold_convert_loc (loc, integer_type_node,
8494 build1 (INDIRECT_REF, cst_uchar_node,
8495 fold_convert_loc (loc,
8496 cst_uchar_ptr_node,
8497 arg2)));
8498 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8501 return NULL_TREE;
8504 /* Fold a call to builtin isascii with argument ARG. */
8506 static tree
8507 fold_builtin_isascii (location_t loc, tree arg)
8509 if (!validate_arg (arg, INTEGER_TYPE))
8510 return NULL_TREE;
8511 else
8513 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8514 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8515 build_int_cst (integer_type_node,
8516 ~ (unsigned HOST_WIDE_INT) 0x7f));
8517 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8518 arg, integer_zero_node);
8522 /* Fold a call to builtin toascii with argument ARG. */
8524 static tree
8525 fold_builtin_toascii (location_t loc, tree arg)
8527 if (!validate_arg (arg, INTEGER_TYPE))
8528 return NULL_TREE;
8530 /* Transform toascii(c) -> (c & 0x7f). */
8531 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8532 build_int_cst (integer_type_node, 0x7f));
8535 /* Fold a call to builtin isdigit with argument ARG. */
8537 static tree
8538 fold_builtin_isdigit (location_t loc, tree arg)
8540 if (!validate_arg (arg, INTEGER_TYPE))
8541 return NULL_TREE;
8542 else
8544 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8545 /* According to the C standard, isdigit is unaffected by locale.
8546 However, it definitely is affected by the target character set. */
8547 unsigned HOST_WIDE_INT target_digit0
8548 = lang_hooks.to_target_charset ('0');
8550 if (target_digit0 == 0)
8551 return NULL_TREE;
8553 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8554 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8555 build_int_cst (unsigned_type_node, target_digit0));
8556 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8557 build_int_cst (unsigned_type_node, 9));
8561 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8563 static tree
8564 fold_builtin_fabs (location_t loc, tree arg, tree type)
8566 if (!validate_arg (arg, REAL_TYPE))
8567 return NULL_TREE;
8569 arg = fold_convert_loc (loc, type, arg);
8570 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8573 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8575 static tree
8576 fold_builtin_abs (location_t loc, tree arg, tree type)
8578 if (!validate_arg (arg, INTEGER_TYPE))
8579 return NULL_TREE;
8581 arg = fold_convert_loc (loc, type, arg);
8582 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8585 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8587 static tree
8588 fold_builtin_carg (location_t loc, tree arg, tree type)
8590 if (validate_arg (arg, COMPLEX_TYPE)
8591 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8593 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8595 if (atan2_fn)
8597 tree new_arg = builtin_save_expr (arg);
8598 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8599 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8600 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8604 return NULL_TREE;
8607 /* Fold a call to builtin frexp, we can assume the base is 2. */
8609 static tree
8610 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8612 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8613 return NULL_TREE;
8615 STRIP_NOPS (arg0);
8617 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8618 return NULL_TREE;
8620 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8622 /* Proceed if a valid pointer type was passed in. */
8623 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8625 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8626 tree frac, exp;
8628 switch (value->cl)
8630 case rvc_zero:
8631 /* For +-0, return (*exp = 0, +-0). */
8632 exp = integer_zero_node;
8633 frac = arg0;
8634 break;
8635 case rvc_nan:
8636 case rvc_inf:
8637 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8638 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8639 case rvc_normal:
8641 /* Since the frexp function always expects base 2, and in
8642 GCC normalized significands are already in the range
8643 [0.5, 1.0), we have exactly what frexp wants. */
8644 REAL_VALUE_TYPE frac_rvt = *value;
8645 SET_REAL_EXP (&frac_rvt, 0);
8646 frac = build_real (rettype, frac_rvt);
8647 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8649 break;
8650 default:
8651 gcc_unreachable ();
8654 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8655 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8656 TREE_SIDE_EFFECTS (arg1) = 1;
8657 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8660 return NULL_TREE;
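/* Worked example (constant-folding sketch): for arg0 == 12.0 the
   normalized significand is 0.75 and the exponent is 4, since
   12.0 == 0.75 * 2^4, so the call folds to (*exp = 4, 0.75). */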
8663 /* Fold a call to builtin modf. */
8665 static tree
8666 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8668 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8669 return NULL_TREE;
8671 STRIP_NOPS (arg0);
8673 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8674 return NULL_TREE;
8676 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8678 /* Proceed if a valid pointer type was passed in. */
8679 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8681 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8682 REAL_VALUE_TYPE trunc, frac;
8684 switch (value->cl)
8686 case rvc_nan:
8687 case rvc_zero:
8688 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8689 trunc = frac = *value;
8690 break;
8691 case rvc_inf:
8692 /* For +-Inf, return (*arg1 = arg0, +-0). */
8693 frac = dconst0;
8694 frac.sign = value->sign;
8695 trunc = *value;
8696 break;
8697 case rvc_normal:
8698 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8699 real_trunc (&trunc, VOIDmode, value);
8700 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8701 /* If the original number was negative and already
8702 integral, then the fractional part is -0.0. */
8703 if (value->sign && frac.cl == rvc_zero)
8704 frac.sign = value->sign;
8705 break;
8708 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8709 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8710 build_real (rettype, trunc));
8711 TREE_SIDE_EFFECTS (arg1) = 1;
8712 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8713 build_real (rettype, frac));
8716 return NULL_TREE;
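/* Worked example (constant-folding sketch): modf (2.5, &ip) folds to
   (*ip = 2.0, 0.5); for a negative, already-integral input such as
   -3.0 the result is (*ip = -3.0, -0.0), matching the sign handling
   above. */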
8719 /* Given a location LOC, an interclass builtin function decl FNDECL
8720 and its single argument ARG, return a folded expression computing
8721 the same, or NULL_TREE if we either couldn't or didn't want to fold
8722 (the latter happens if there's an RTL instruction available). */
8724 static tree
8725 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8727 machine_mode mode;
8729 if (!validate_arg (arg, REAL_TYPE))
8730 return NULL_TREE;
8732 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8733 return NULL_TREE;
8735 mode = TYPE_MODE (TREE_TYPE (arg));
8737 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8739 /* If there is no optab, try generic code. */
8740 switch (DECL_FUNCTION_CODE (fndecl))
8742 tree result;
8744 CASE_FLT_FN (BUILT_IN_ISINF):
8746 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8747 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8748 tree type = TREE_TYPE (arg);
8749 REAL_VALUE_TYPE r;
8750 char buf[128];
8752 if (is_ibm_extended)
8754 /* NaN and Inf are encoded in the high-order double value
8755 only. The low-order value is not significant. */
8756 type = double_type_node;
8757 mode = DFmode;
8758 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8760 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8761 real_from_string (&r, buf);
8762 result = build_call_expr (isgr_fn, 2,
8763 fold_build1_loc (loc, ABS_EXPR, type, arg),
8764 build_real (type, r));
8765 return result;
8767 CASE_FLT_FN (BUILT_IN_FINITE):
8768 case BUILT_IN_ISFINITE:
8770 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8771 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8772 tree type = TREE_TYPE (arg);
8773 REAL_VALUE_TYPE r;
8774 char buf[128];
8776 if (is_ibm_extended)
8778 /* NaN and Inf are encoded in the high-order double value
8779 only. The low-order value is not significant. */
8780 type = double_type_node;
8781 mode = DFmode;
8782 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8784 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8785 real_from_string (&r, buf);
8786 result = build_call_expr (isle_fn, 2,
8787 fold_build1_loc (loc, ABS_EXPR, type, arg),
8788 build_real (type, r));
8789 /*result = fold_build2_loc (loc, UNGT_EXPR,
8790 TREE_TYPE (TREE_TYPE (fndecl)),
8791 fold_build1_loc (loc, ABS_EXPR, type, arg),
8792 build_real (type, r));
8793 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8794 TREE_TYPE (TREE_TYPE (fndecl)),
8795 result);*/
8796 return result;
8798 case BUILT_IN_ISNORMAL:
8800 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8801 islessequal(fabs(x),DBL_MAX). */
8802 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8803 tree type = TREE_TYPE (arg);
8804 tree orig_arg, max_exp, min_exp;
8805 machine_mode orig_mode = mode;
8806 REAL_VALUE_TYPE rmax, rmin;
8807 char buf[128];
8809 orig_arg = arg = builtin_save_expr (arg);
8810 if (is_ibm_extended)
8812 /* Use double to test the normal range of IBM extended
8813 precision. Emin for IBM extended precision is
8814 different to emin for IEEE double, being 53 higher
8815 since the low double exponent is at least 53 lower
8816 than the high double exponent. */
8817 type = double_type_node;
8818 mode = DFmode;
8819 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8821 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8823 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8824 real_from_string (&rmax, buf);
8825 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8826 real_from_string (&rmin, buf);
8827 max_exp = build_real (type, rmax);
8828 min_exp = build_real (type, rmin);
8830 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8831 if (is_ibm_extended)
8833 /* Testing the high end of the range is done just using
8834 the high double, using the same test as isfinite().
8835 For the subnormal end of the range we first test the
8836 high double, then if its magnitude is equal to the
8837 limit of 0x1p-969, we test whether the low double is
8838 non-zero and opposite sign to the high double. */
8839 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8840 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8841 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8842 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8843 arg, min_exp);
8844 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8845 complex_double_type_node, orig_arg);
8846 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8847 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8848 tree zero = build_real (type, dconst0);
8849 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8850 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8851 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8852 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8853 fold_build3 (COND_EXPR,
8854 integer_type_node,
8855 hilt, logt, lolt));
8856 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8857 eq_min, ok_lo);
8858 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8859 gt_min, eq_min);
8861 else
8863 tree const isge_fn
8864 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8865 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8867 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8868 max_exp, min_exp);
8869 return result;
8871 default:
8872 break;
8875 return NULL_TREE;
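/* For illustration (a sketch, not part of the compiled code): for a
   plain binary64 double, IBM extended mode aside, the interclass
   folds above amount to

     __builtin_isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     __builtin_isnormal (x) -> isgreaterequal (fabs (x), 0x1p-1022)
                               & islessequal (fabs (x), DBL_MAX)

   where 0x1p-1022 is 2^(emin - 1) for the mode and the DBL_MAX
   string comes from get_max_float.  */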
8878 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
8879 ARG is the argument for the call. */
8881 static tree
8882 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8886 if (!validate_arg (arg, REAL_TYPE))
8887 return NULL_TREE;
8889 switch (builtin_index)
8891 case BUILT_IN_ISINF:
8892 if (!HONOR_INFINITIES (arg))
8893 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8895 return NULL_TREE;
8897 case BUILT_IN_ISINF_SIGN:
8899 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8900 /* In a boolean context, GCC will fold the inner COND_EXPR to
8901 1. So e.g. "if (isinf_sign(x))" would be folded to just
8902 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8903 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8904 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8905 tree tmp = NULL_TREE;
8907 arg = builtin_save_expr (arg);
8909 if (signbit_fn && isinf_fn)
8911 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8912 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8914 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8915 signbit_call, integer_zero_node);
8916 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8917 isinf_call, integer_zero_node);
8919 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8920 integer_minus_one_node, integer_one_node);
8921 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8922 isinf_call, tmp,
8923 integer_zero_node);
8926 return tmp;
8929 case BUILT_IN_ISFINITE:
8930 if (!HONOR_NANS (arg)
8931 && !HONOR_INFINITIES (arg))
8932 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8934 return NULL_TREE;
8936 case BUILT_IN_ISNAN:
8937 if (!HONOR_NANS (arg))
8938 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8941 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8942 if (is_ibm_extended)
8944 /* NaN and Inf are encoded in the high-order double value
8945 only. The low-order value is not significant. */
8946 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8949 arg = builtin_save_expr (arg);
8950 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8952 default:
8953 gcc_unreachable ();
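/* For illustration, the effect of fold_builtin_classify when NaNs
   and infinities are honored for the argument's mode:

     __builtin_isnan (x)      -> x unord x   (UNORDERED_EXPR)
     __builtin_isinf_sign (x) -> isinf (x) ? (signbit (x) ? -1 : 1) : 0

   With -ffinite-math-only, isnan (x) and isinf (x) instead collapse
   to constants, e.g. isnan (x) becomes (void) x, 0.  */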
8957 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8958 This builtin will generate code to return the appropriate floating
8959 point classification depending on the value of the floating point
8960 number passed in. The possible return values must be supplied as
8961 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8962 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8963 one floating point argument which is "type generic". */
8965 static tree
8966 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8968 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8969 arg, type, res, tmp;
8970 machine_mode mode;
8971 REAL_VALUE_TYPE r;
8972 char buf[128];
8974 /* Verify the required arguments in the original call. */
8975 if (nargs != 6
8976 || !validate_arg (args[0], INTEGER_TYPE)
8977 || !validate_arg (args[1], INTEGER_TYPE)
8978 || !validate_arg (args[2], INTEGER_TYPE)
8979 || !validate_arg (args[3], INTEGER_TYPE)
8980 || !validate_arg (args[4], INTEGER_TYPE)
8981 || !validate_arg (args[5], REAL_TYPE))
8982 return NULL_TREE;
8984 fp_nan = args[0];
8985 fp_infinite = args[1];
8986 fp_normal = args[2];
8987 fp_subnormal = args[3];
8988 fp_zero = args[4];
8989 arg = args[5];
8990 type = TREE_TYPE (arg);
8991 mode = TYPE_MODE (type);
8992 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8994 /* fpclassify(x) ->
8995 isnan(x) ? FP_NAN :
8996 (fabs(x) == Inf ? FP_INFINITE :
8997 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8998 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9000 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9001 build_real (type, dconst0));
9002 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9003 tmp, fp_zero, fp_subnormal);
9005 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9006 real_from_string (&r, buf);
9007 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9008 arg, build_real (type, r));
9009 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9011 if (HONOR_INFINITIES (mode))
9013 real_inf (&r);
9014 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9015 build_real (type, r));
9016 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9017 fp_infinite, res);
9020 if (HONOR_NANS (mode))
9022 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9023 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9026 return res;
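/* For illustration, the COND_EXPR chain built above is equivalent to
   (for double, whose smallest normal value is 0x1p-1022):

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)
       -> !(x ord x)            ? FP_NAN
        : fabs (x) == Inf       ? FP_INFINITE
        : fabs (x) >= 0x1p-1022 ? FP_NORMAL
        : fabs (x) == 0.0       ? FP_ZERO
        :                         FP_SUBNORMAL

   with the NaN and Inf legs omitted when the mode honors neither.  */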
9029 /* Fold a call to an unordered comparison function such as
9030 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9031 being called and ARG0 and ARG1 are the arguments for the call.
9032 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9033 the opposite of the desired result. UNORDERED_CODE is used
9034 for modes that can hold NaNs and ORDERED_CODE is used for
9035 the rest. */
9037 static tree
9038 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9039 enum tree_code unordered_code,
9040 enum tree_code ordered_code)
9042 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9043 enum tree_code code;
9044 tree type0, type1;
9045 enum tree_code code0, code1;
9046 tree cmp_type = NULL_TREE;
9048 type0 = TREE_TYPE (arg0);
9049 type1 = TREE_TYPE (arg1);
9051 code0 = TREE_CODE (type0);
9052 code1 = TREE_CODE (type1);
9054 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9055 /* Choose the wider of two real types. */
9056 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9057 ? type0 : type1;
9058 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9059 cmp_type = type0;
9060 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9061 cmp_type = type1;
9063 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9064 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9066 if (unordered_code == UNORDERED_EXPR)
9068 if (!HONOR_NANS (arg0))
9069 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9070 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9073 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9074 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9075 fold_build2_loc (loc, code, type, arg0, arg1));
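/* For illustration: when NaNs are honored the folds above go through
   the inverted unordered codes, e.g.

     __builtin_isgreater (x, y)     -> !(x unle y)
     __builtin_islessgreater (x, y) -> !(x uneq y)

   so a quiet NaN operand yields 0 without raising FE_INVALID; with
   -ffinite-math-only they become plain !(x <= y) and !(x == y).  */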
9078 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9079 arithmetic if it can never overflow, or into internal functions that
9080 return both the result of the arithmetic and a boolean overflow flag
9081 in a complex integer result, or some other check for overflow.
9082 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9083 checking part of that. */
9085 static tree
9086 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9087 tree arg0, tree arg1, tree arg2)
9089 enum internal_fn ifn = IFN_LAST;
9090 /* The code of the expression corresponding to the type-generic
9091 built-in, or ERROR_MARK for the type-specific ones. */
9092 enum tree_code opcode = ERROR_MARK;
9093 bool ovf_only = false;
9095 switch (fcode)
9097 case BUILT_IN_ADD_OVERFLOW_P:
9098 ovf_only = true;
9099 /* FALLTHRU */
9100 case BUILT_IN_ADD_OVERFLOW:
9101 opcode = PLUS_EXPR;
9102 /* FALLTHRU */
9103 case BUILT_IN_SADD_OVERFLOW:
9104 case BUILT_IN_SADDL_OVERFLOW:
9105 case BUILT_IN_SADDLL_OVERFLOW:
9106 case BUILT_IN_UADD_OVERFLOW:
9107 case BUILT_IN_UADDL_OVERFLOW:
9108 case BUILT_IN_UADDLL_OVERFLOW:
9109 ifn = IFN_ADD_OVERFLOW;
9110 break;
9111 case BUILT_IN_SUB_OVERFLOW_P:
9112 ovf_only = true;
9113 /* FALLTHRU */
9114 case BUILT_IN_SUB_OVERFLOW:
9115 opcode = MINUS_EXPR;
9116 /* FALLTHRU */
9117 case BUILT_IN_SSUB_OVERFLOW:
9118 case BUILT_IN_SSUBL_OVERFLOW:
9119 case BUILT_IN_SSUBLL_OVERFLOW:
9120 case BUILT_IN_USUB_OVERFLOW:
9121 case BUILT_IN_USUBL_OVERFLOW:
9122 case BUILT_IN_USUBLL_OVERFLOW:
9123 ifn = IFN_SUB_OVERFLOW;
9124 break;
9125 case BUILT_IN_MUL_OVERFLOW_P:
9126 ovf_only = true;
9127 /* FALLTHRU */
9128 case BUILT_IN_MUL_OVERFLOW:
9129 opcode = MULT_EXPR;
9130 /* FALLTHRU */
9131 case BUILT_IN_SMUL_OVERFLOW:
9132 case BUILT_IN_SMULL_OVERFLOW:
9133 case BUILT_IN_SMULLL_OVERFLOW:
9134 case BUILT_IN_UMUL_OVERFLOW:
9135 case BUILT_IN_UMULL_OVERFLOW:
9136 case BUILT_IN_UMULLL_OVERFLOW:
9137 ifn = IFN_MUL_OVERFLOW;
9138 break;
9139 default:
9140 gcc_unreachable ();
9143 /* For the "generic" overloads, the first two arguments can have different
9144 types and the last argument determines the target type to use to check
9145 for overflow. The arguments of the other overloads all have the same
9146 type. */
9147 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9149 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9150 arguments are constant, attempt to fold the built-in call into a constant
9151 expression indicating whether or not it detected an overflow. */
9152 if (ovf_only
9153 && TREE_CODE (arg0) == INTEGER_CST
9154 && TREE_CODE (arg1) == INTEGER_CST)
9155 /* Perform the computation in the target type and check for overflow. */
9156 return omit_one_operand_loc (loc, boolean_type_node,
9157 arith_overflowed_p (opcode, type, arg0, arg1)
9158 ? boolean_true_node : boolean_false_node,
9159 arg2);
9161 tree ctype = build_complex_type (type);
9162 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9163 2, arg0, arg1);
9164 tree tgt = save_expr (call);
9165 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9166 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9167 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9169 if (ovf_only)
9170 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9172 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9173 tree store
9174 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9175 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
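/* For illustration, a sketch of the folding above: a source-level

     int res;
     bool ovf = __builtin_add_overflow (a, b, &res);

   becomes roughly

     _c = ADD_OVERFLOW (a, b);          complex-int internal call
     res = REALPART_EXPR <_c>;
     ovf = (bool) IMAGPART_EXPR <_c>;

   while __builtin_add_overflow_p with two INTEGER_CST operands folds
   directly to true/false via arith_overflowed_p.  */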
9178 /* Fold a call to __builtin_FILE to a constant string. */
9180 static inline tree
9181 fold_builtin_FILE (location_t loc)
9183 if (const char *fname = LOCATION_FILE (loc))
9185 /* The documentation says this builtin is equivalent to the preprocessor
9186 __FILE__ macro so it appears appropriate to use the same file prefix
9187 mappings. */
9188 fname = remap_macro_filename (fname);
9189 return build_string_literal (strlen (fname) + 1, fname);
9192 return build_string_literal (1, "");
9195 /* Fold a call to __builtin_FUNCTION to a constant string. */
9197 static inline tree
9198 fold_builtin_FUNCTION ()
9200 const char *name = "";
9202 if (current_function_decl)
9203 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9205 return build_string_literal (strlen (name) + 1, name);
9208 /* Fold a call to __builtin_LINE to an integer constant. */
9210 static inline tree
9211 fold_builtin_LINE (location_t loc, tree type)
9213 return build_int_cst (type, LOCATION_LINE (loc));
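/* For illustration: each of the three builtins above folds to a
   constant at the call site, e.g. in a file "t.c":

     __builtin_FILE ()     -> "t.c"  (after file-prefix remapping)
     __builtin_FUNCTION () -> name of the enclosing function, or ""
     __builtin_LINE ()     -> the line number of the call

   so nothing remains of the calls at run time.  */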
9216 /* Fold a call to built-in function FNDECL with 0 arguments.
9217 This function returns NULL_TREE if no simplification was possible. */
9219 static tree
9220 fold_builtin_0 (location_t loc, tree fndecl)
9222 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9223 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9224 switch (fcode)
9226 case BUILT_IN_FILE:
9227 return fold_builtin_FILE (loc);
9229 case BUILT_IN_FUNCTION:
9230 return fold_builtin_FUNCTION ();
9232 case BUILT_IN_LINE:
9233 return fold_builtin_LINE (loc, type);
9235 CASE_FLT_FN (BUILT_IN_INF):
9236 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9237 case BUILT_IN_INFD32:
9238 case BUILT_IN_INFD64:
9239 case BUILT_IN_INFD128:
9240 return fold_builtin_inf (loc, type, true);
9242 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9243 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9244 return fold_builtin_inf (loc, type, false);
9246 case BUILT_IN_CLASSIFY_TYPE:
9247 return fold_builtin_classify_type (NULL_TREE);
9249 default:
9250 break;
9252 return NULL_TREE;
9255 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9256 This function returns NULL_TREE if no simplification was possible. */
9258 static tree
9259 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9261 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9262 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9264 if (TREE_CODE (arg0) == ERROR_MARK)
9265 return NULL_TREE;
9267 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9268 return ret;
9270 switch (fcode)
9272 case BUILT_IN_CONSTANT_P:
9274 tree val = fold_builtin_constant_p (arg0);
9276 /* Gimplification will pull the CALL_EXPR for the builtin out of
9277 an if condition. When not optimizing, we'll not CSE it back.
9278 To avoid regressions such as link errors, return false now. */
9279 if (!val && !optimize)
9280 val = integer_zero_node;
9282 return val;
9285 case BUILT_IN_CLASSIFY_TYPE:
9286 return fold_builtin_classify_type (arg0);
9288 case BUILT_IN_STRLEN:
9289 return fold_builtin_strlen (loc, type, arg0);
9291 CASE_FLT_FN (BUILT_IN_FABS):
9292 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9293 case BUILT_IN_FABSD32:
9294 case BUILT_IN_FABSD64:
9295 case BUILT_IN_FABSD128:
9296 return fold_builtin_fabs (loc, arg0, type);
9298 case BUILT_IN_ABS:
9299 case BUILT_IN_LABS:
9300 case BUILT_IN_LLABS:
9301 case BUILT_IN_IMAXABS:
9302 return fold_builtin_abs (loc, arg0, type);
9304 CASE_FLT_FN (BUILT_IN_CONJ):
9305 if (validate_arg (arg0, COMPLEX_TYPE)
9306 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9307 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9308 break;
9310 CASE_FLT_FN (BUILT_IN_CREAL):
9311 if (validate_arg (arg0, COMPLEX_TYPE)
9312 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9313 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9314 break;
9316 CASE_FLT_FN (BUILT_IN_CIMAG):
9317 if (validate_arg (arg0, COMPLEX_TYPE)
9318 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9319 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9320 break;
9322 CASE_FLT_FN (BUILT_IN_CARG):
9323 return fold_builtin_carg (loc, arg0, type);
9325 case BUILT_IN_ISASCII:
9326 return fold_builtin_isascii (loc, arg0);
9328 case BUILT_IN_TOASCII:
9329 return fold_builtin_toascii (loc, arg0);
9331 case BUILT_IN_ISDIGIT:
9332 return fold_builtin_isdigit (loc, arg0);
9334 CASE_FLT_FN (BUILT_IN_FINITE):
9335 case BUILT_IN_FINITED32:
9336 case BUILT_IN_FINITED64:
9337 case BUILT_IN_FINITED128:
9338 case BUILT_IN_ISFINITE:
9340 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9341 if (ret)
9342 return ret;
9343 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9346 CASE_FLT_FN (BUILT_IN_ISINF):
9347 case BUILT_IN_ISINFD32:
9348 case BUILT_IN_ISINFD64:
9349 case BUILT_IN_ISINFD128:
9351 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9352 if (ret)
9353 return ret;
9354 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9357 case BUILT_IN_ISNORMAL:
9358 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9360 case BUILT_IN_ISINF_SIGN:
9361 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9363 CASE_FLT_FN (BUILT_IN_ISNAN):
9364 case BUILT_IN_ISNAND32:
9365 case BUILT_IN_ISNAND64:
9366 case BUILT_IN_ISNAND128:
9367 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9369 case BUILT_IN_FREE:
9370 if (integer_zerop (arg0))
9371 return build_empty_stmt (loc);
9372 break;
9374 default:
9375 break;
9378 return NULL_TREE;
9382 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9383 This function returns NULL_TREE if no simplification was possible. */
9385 static tree
9386 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9388 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9389 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9391 if (TREE_CODE (arg0) == ERROR_MARK
9392 || TREE_CODE (arg1) == ERROR_MARK)
9393 return NULL_TREE;
9395 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9396 return ret;
9398 switch (fcode)
9400 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9401 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9402 if (validate_arg (arg0, REAL_TYPE)
9403 && validate_arg (arg1, POINTER_TYPE))
9404 return do_mpfr_lgamma_r (arg0, arg1, type);
9405 break;
9407 CASE_FLT_FN (BUILT_IN_FREXP):
9408 return fold_builtin_frexp (loc, arg0, arg1, type);
9410 CASE_FLT_FN (BUILT_IN_MODF):
9411 return fold_builtin_modf (loc, arg0, arg1, type);
9413 case BUILT_IN_STRSPN:
9414 return fold_builtin_strspn (loc, arg0, arg1);
9416 case BUILT_IN_STRCSPN:
9417 return fold_builtin_strcspn (loc, arg0, arg1);
9419 case BUILT_IN_STRPBRK:
9420 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9422 case BUILT_IN_EXPECT:
9423 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9425 case BUILT_IN_ISGREATER:
9426 return fold_builtin_unordered_cmp (loc, fndecl,
9427 arg0, arg1, UNLE_EXPR, LE_EXPR);
9428 case BUILT_IN_ISGREATEREQUAL:
9429 return fold_builtin_unordered_cmp (loc, fndecl,
9430 arg0, arg1, UNLT_EXPR, LT_EXPR);
9431 case BUILT_IN_ISLESS:
9432 return fold_builtin_unordered_cmp (loc, fndecl,
9433 arg0, arg1, UNGE_EXPR, GE_EXPR);
9434 case BUILT_IN_ISLESSEQUAL:
9435 return fold_builtin_unordered_cmp (loc, fndecl,
9436 arg0, arg1, UNGT_EXPR, GT_EXPR);
9437 case BUILT_IN_ISLESSGREATER:
9438 return fold_builtin_unordered_cmp (loc, fndecl,
9439 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9440 case BUILT_IN_ISUNORDERED:
9441 return fold_builtin_unordered_cmp (loc, fndecl,
9442 arg0, arg1, UNORDERED_EXPR,
9443 NOP_EXPR);
9445 /* We do the folding for va_start in the expander. */
9446 case BUILT_IN_VA_START:
9447 break;
9449 case BUILT_IN_OBJECT_SIZE:
9450 return fold_builtin_object_size (arg0, arg1);
9452 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9453 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9455 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9456 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9458 default:
9459 break;
9461 return NULL_TREE;
9464 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9465 and ARG2.
9466 This function returns NULL_TREE if no simplification was possible. */
9468 static tree
9469 fold_builtin_3 (location_t loc, tree fndecl,
9470 tree arg0, tree arg1, tree arg2)
9472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9473 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9475 if (TREE_CODE (arg0) == ERROR_MARK
9476 || TREE_CODE (arg1) == ERROR_MARK
9477 || TREE_CODE (arg2) == ERROR_MARK)
9478 return NULL_TREE;
9480 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9481 arg0, arg1, arg2))
9482 return ret;
9484 switch (fcode)
9487 CASE_FLT_FN (BUILT_IN_SINCOS):
9488 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9490 CASE_FLT_FN (BUILT_IN_REMQUO):
9491 if (validate_arg (arg0, REAL_TYPE)
9492 && validate_arg (arg1, REAL_TYPE)
9493 && validate_arg (arg2, POINTER_TYPE))
9494 return do_mpfr_remquo (arg0, arg1, arg2);
9495 break;
9497 case BUILT_IN_MEMCMP:
9498 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9500 case BUILT_IN_EXPECT:
9501 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9503 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9504 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9506 case BUILT_IN_ADD_OVERFLOW:
9507 case BUILT_IN_SUB_OVERFLOW:
9508 case BUILT_IN_MUL_OVERFLOW:
9509 case BUILT_IN_ADD_OVERFLOW_P:
9510 case BUILT_IN_SUB_OVERFLOW_P:
9511 case BUILT_IN_MUL_OVERFLOW_P:
9512 case BUILT_IN_SADD_OVERFLOW:
9513 case BUILT_IN_SADDL_OVERFLOW:
9514 case BUILT_IN_SADDLL_OVERFLOW:
9515 case BUILT_IN_SSUB_OVERFLOW:
9516 case BUILT_IN_SSUBL_OVERFLOW:
9517 case BUILT_IN_SSUBLL_OVERFLOW:
9518 case BUILT_IN_SMUL_OVERFLOW:
9519 case BUILT_IN_SMULL_OVERFLOW:
9520 case BUILT_IN_SMULLL_OVERFLOW:
9521 case BUILT_IN_UADD_OVERFLOW:
9522 case BUILT_IN_UADDL_OVERFLOW:
9523 case BUILT_IN_UADDLL_OVERFLOW:
9524 case BUILT_IN_USUB_OVERFLOW:
9525 case BUILT_IN_USUBL_OVERFLOW:
9526 case BUILT_IN_USUBLL_OVERFLOW:
9527 case BUILT_IN_UMUL_OVERFLOW:
9528 case BUILT_IN_UMULL_OVERFLOW:
9529 case BUILT_IN_UMULLL_OVERFLOW:
9530 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9532 default:
9533 break;
9535 return NULL_TREE;
9538 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9539 arguments. The unnamed bool parameter (true if the result of the
9540 function call is ignored) is currently unused. This function returns NULL_TREE if no
9541 simplification was possible. */
9543 tree
9544 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9546 tree ret = NULL_TREE;
9548 switch (nargs)
9550 case 0:
9551 ret = fold_builtin_0 (loc, fndecl);
9552 break;
9553 case 1:
9554 ret = fold_builtin_1 (loc, fndecl, args[0]);
9555 break;
9556 case 2:
9557 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9558 break;
9559 case 3:
9560 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9561 break;
9562 default:
9563 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9564 break;
9566 if (ret)
9568 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9569 SET_EXPR_LOCATION (ret, loc);
9570 return ret;
9572 return NULL_TREE;
9575 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9576 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9577 of arguments in ARGS to be omitted. OLDNARGS is the number of
9578 elements in ARGS. */
9580 static tree
9581 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9582 int skip, tree fndecl, int n, va_list newargs)
9584 int nargs = oldnargs - skip + n;
9585 tree *buffer;
9587 if (n > 0)
9589 int i, j;
9591 buffer = XALLOCAVEC (tree, nargs);
9592 for (i = 0; i < n; i++)
9593 buffer[i] = va_arg (newargs, tree);
9594 for (j = skip; j < oldnargs; j++, i++)
9595 buffer[i] = args[j];
9597 else
9598 buffer = args + skip;
9600 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9603 /* Return true if FNDECL shouldn't be folded right now.
9604 If a built-in function has an always_inline wrapper, defer
9605 folding it until after always_inline functions have been
9606 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9607 might not be performed. */
9609 bool
9610 avoid_folding_inline_builtin (tree fndecl)
9612 return (DECL_DECLARED_INLINE_P (fndecl)
9613 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9614 && cfun
9615 && !cfun->always_inline_functions_inlined
9616 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
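/* For illustration, the situation this guards against: a C library
   may wrap a builtin in an always_inline checking function, along
   the lines of (a hypothetical sketch; details vary by library)

     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n,
                                      __builtin_object_size (d, 0));
     }

   Folding a call to memcpy before the wrapper has been inlined would
   bypass the -D_FORTIFY_SOURCE object-size check.  */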
9619 /* A wrapper function for builtin folding that prevents warnings for
9620 "statement without effect" and the like, caused by removing the
9621 call node earlier than the warning is generated. */
9623 tree
9624 fold_call_expr (location_t loc, tree exp, bool ignore)
9626 tree ret = NULL_TREE;
9627 tree fndecl = get_callee_fndecl (exp);
9628 if (fndecl
9629 && TREE_CODE (fndecl) == FUNCTION_DECL
9630 && DECL_BUILT_IN (fndecl)
9631 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9632 yet. Defer folding until we see all the arguments
9633 (after inlining). */
9634 && !CALL_EXPR_VA_ARG_PACK (exp))
9636 int nargs = call_expr_nargs (exp);
9638 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9639 instead the last argument is __builtin_va_arg_pack (). Defer folding
9640 even in that case, until arguments are finalized. */
9641 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9643 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9644 if (fndecl2
9645 && TREE_CODE (fndecl2) == FUNCTION_DECL
9646 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9647 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9648 return NULL_TREE;
9651 if (avoid_folding_inline_builtin (fndecl))
9652 return NULL_TREE;
9654 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9655 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9656 CALL_EXPR_ARGP (exp), ignore);
9657 else
9659 tree *args = CALL_EXPR_ARGP (exp);
9660 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9661 if (ret)
9662 return ret;
9665 return NULL_TREE;
9668 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9669 N arguments are passed in the array ARGARRAY. Return a folded
9670 expression or NULL_TREE if no simplification was possible. */
9672 tree
9673 fold_builtin_call_array (location_t loc, tree,
9674 tree fn,
9675 int n,
9676 tree *argarray)
9678 if (TREE_CODE (fn) != ADDR_EXPR)
9679 return NULL_TREE;
9681 tree fndecl = TREE_OPERAND (fn, 0);
9682 if (TREE_CODE (fndecl) == FUNCTION_DECL
9683 && DECL_BUILT_IN (fndecl))
9685 /* If last argument is __builtin_va_arg_pack (), arguments to this
9686 function are not finalized yet. Defer folding until they are. */
9687 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9689 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9690 if (fndecl2
9691 && TREE_CODE (fndecl2) == FUNCTION_DECL
9692 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9693 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9694 return NULL_TREE;
9696 if (avoid_folding_inline_builtin (fndecl))
9697 return NULL_TREE;
9698 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9699 return targetm.fold_builtin (fndecl, n, argarray, false);
9700 else
9701 return fold_builtin_n (loc, fndecl, argarray, n, false);
9704 return NULL_TREE;
9707 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9708 along with N new arguments specified as the "..." parameters. SKIP
9709 is the number of arguments in EXP to be omitted. This function is used
9710 to do varargs-to-varargs transformations. */
9712 static tree
9713 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9715 va_list ap;
9716 tree t;
9718 va_start (ap, n);
9719 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9720 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9721 va_end (ap);
9723 return t;
9726 /* Validate a single argument ARG against a tree code CODE representing
9727 a type. Return true when argument is valid. */
9729 static bool
9730 validate_arg (const_tree arg, enum tree_code code)
9732 if (!arg)
9733 return false;
9734 else if (code == POINTER_TYPE)
9735 return POINTER_TYPE_P (TREE_TYPE (arg));
9736 else if (code == INTEGER_TYPE)
9737 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9738 return code == TREE_CODE (TREE_TYPE (arg));
9741 /* This function validates the types of a function call argument list
9742 against a specified list of tree_codes. If the last specifier is a 0,
9743 that represents an ellipsis, otherwise the last specifier must be a
9744 VOID_TYPE.
9746 This is the GIMPLE version of validate_arglist. Eventually we want to
9747 completely convert builtins.c to work from GIMPLEs and the tree based
9748 validate_arglist will then be removed. */
9750 bool
9751 validate_gimple_arglist (const gcall *call, ...)
9753 enum tree_code code;
9754 bool res = false;
9755 va_list ap;
9756 const_tree arg;
9757 size_t i;
9759 va_start (ap, call);
9760 i = 0;
9761 do
9764 code = (enum tree_code) va_arg (ap, int);
9765 switch (code)
9767 case 0:
9768 /* This signifies an ellipsis; any further arguments are all ok. */
9769 res = true;
9770 goto end;
9771 case VOID_TYPE:
9772 /* This signifies an endlink, if no arguments remain, return
9773 true, otherwise return false. */
9774 res = (i == gimple_call_num_args (call));
9775 goto end;
9776 default:
9777 /* If no parameters remain or the parameter's code does not
9778 match the specified code, return false. Otherwise continue
9779 checking any remaining arguments. */
9780 arg = gimple_call_arg (call, i++);
9781 if (!validate_arg (arg, code))
9782 goto end;
9783 break;
9786 while (1);
9788 /* We need gotos here so that every path funnels to the single
9789 va_end call on the way out of the function. */
9790 end: ;
9791 va_end (ap);
9793 return res;
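/* For illustration, a typical use of validate_gimple_arglist
   (a sketch):

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                   VOID_TYPE))
       return false;

   accepts exactly one real argument followed by one pointer
   argument; ending the list with 0 instead of VOID_TYPE would allow
   arbitrary trailing arguments.  */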
9796 /* Default target-specific builtin expander that does nothing. */
9798 rtx
9799 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9800 rtx target ATTRIBUTE_UNUSED,
9801 rtx subtarget ATTRIBUTE_UNUSED,
9802 machine_mode mode ATTRIBUTE_UNUSED,
9803 int ignore ATTRIBUTE_UNUSED)
9805 return NULL_RTX;
9808 /* Returns true if EXP represents data that would potentially reside
9809 in a readonly section. */
9811 bool
9812 readonly_data_expr (tree exp)
9814 STRIP_NOPS (exp);
9816 if (TREE_CODE (exp) != ADDR_EXPR)
9817 return false;
9819 exp = get_base_address (TREE_OPERAND (exp, 0));
9820 if (!exp)
9821 return false;
9823 /* Make sure we call decl_readonly_section only for trees it
9824 can handle (since it returns true for everything it doesn't
9825 understand). */
9826 if (TREE_CODE (exp) == STRING_CST
9827 || TREE_CODE (exp) == CONSTRUCTOR
9828 || (VAR_P (exp) && TREE_STATIC (exp)))
9829 return decl_readonly_section (exp, 0);
9830 else
9831 return false;
9834 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9835 to the call, and TYPE is its return type.
9837 Return NULL_TREE if no simplification was possible, otherwise return the
9838 simplified form of the call as a tree.
9840 The simplified form may be a constant or other expression which
9841 computes the same value, but in a more efficient manner (including
9842 calls to other builtin functions).
9844 The call may contain arguments which need to be evaluated, but
9845 which are not useful to determine the result of the call. In
9846 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9847 COMPOUND_EXPR will be an argument which must be evaluated.
9848 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9849 COMPOUND_EXPR in the chain will contain the tree for the simplified
9850 form of the builtin function call. */
9852 static tree
9853 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9855 if (!validate_arg (s1, POINTER_TYPE)
9856 || !validate_arg (s2, POINTER_TYPE))
9857 return NULL_TREE;
9858 else
9860 tree fn;
9861 const char *p1, *p2;
9863 p2 = c_getstr (s2);
9864 if (p2 == NULL)
9865 return NULL_TREE;
9867 p1 = c_getstr (s1);
9868 if (p1 != NULL)
9870 const char *r = strpbrk (p1, p2);
9871 tree tem;
9873 if (r == NULL)
9874 return build_int_cst (TREE_TYPE (s1), 0);
9876 /* Return an offset into the constant string argument. */
9877 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9878 return fold_convert_loc (loc, type, tem);
9881 if (p2[0] == '\0')
9882 /* strpbrk(x, "") == NULL.
9883 Evaluate and ignore s1 in case it had side-effects. */
9884 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9886 if (p2[1] != '\0')
9887 return NULL_TREE; /* Really call strpbrk. */
9889 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9890 if (!fn)
9891 return NULL_TREE;
9893 /* New argument list transforming strpbrk(s1, s2) to
9894 strchr(s1, s2[0]). */
9895 return build_call_expr_loc (loc, fn, 2, s1,
9896 build_int_cst (integer_type_node, p2[0]));
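/* For illustration, the strpbrk transformations above:

     strpbrk (s, "")     -> (void) s, (char *) 0
     strpbrk (s, "a")    -> strchr (s, 'a')
     strpbrk ("ab", "b") -> "ab" + 1

   the fully constant case returning an offset into the first
   argument.  */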
9900 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9901 to the call.
9903 Return NULL_TREE if no simplification was possible, otherwise return the
9904 simplified form of the call as a tree.
9906 The simplified form may be a constant or other expression which
9907 computes the same value, but in a more efficient manner (including
9908 calls to other builtin functions).
9910 The call may contain arguments which need to be evaluated, but
9911 which are not useful to determine the result of the call. In
9912 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9913 COMPOUND_EXPR will be an argument which must be evaluated.
9914 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9915 COMPOUND_EXPR in the chain will contain the tree for the simplified
9916 form of the builtin function call. */
9918 static tree
9919 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9921 if (!validate_arg (s1, POINTER_TYPE)
9922 || !validate_arg (s2, POINTER_TYPE))
9923 return NULL_TREE;
9924 else
9926 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9928 /* If either argument is "", the result is 0. */
9929 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9930 /* Evaluate and ignore both arguments in case either one has
9931 side-effects. */
9932 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9933 s1, s2);
9934 return NULL_TREE;
9938 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9939 to the call.
9941 Return NULL_TREE if no simplification was possible, otherwise return the
9942 simplified form of the call as a tree.
9944 The simplified form may be a constant or other expression which
9945 computes the same value, but in a more efficient manner (including
9946 calls to other builtin functions).
9948 The call may contain arguments which need to be evaluated, but
9949 which are not useful to determine the result of the call. In
9950 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9951 COMPOUND_EXPR will be an argument which must be evaluated.
9952 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9953 COMPOUND_EXPR in the chain will contain the tree for the simplified
9954 form of the builtin function call. */
9956 static tree
9957 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9959 if (!validate_arg (s1, POINTER_TYPE)
9960 || !validate_arg (s2, POINTER_TYPE))
9961 return NULL_TREE;
9962 else
9964 /* If the first argument is "", the result is 0. */
9965 const char *p1 = c_getstr (s1);
9966 if (p1 && *p1 == '\0')
9968 /* Evaluate and ignore argument s2 in case it has
9969 side-effects. */
9970 return omit_one_operand_loc (loc, size_type_node,
9971 size_zero_node, s2);
9974 /* If the second argument is "", return __builtin_strlen(s1). */
9975 const char *p2 = c_getstr (s2);
9976 if (p2 && *p2 == '\0')
9978 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9980 /* If the replacement _DECL isn't initialized, don't do the
9981 transformation. */
9982 if (!fn)
9983 return NULL_TREE;
9985 return build_call_expr_loc (loc, fn, 1, s1);
9987 return NULL_TREE;
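/* For illustration, the strspn/strcspn folds above:

     strspn (s, "")  -> (void) s, (size_t) 0
     strcspn ("", s) -> (void) s, (size_t) 0
     strcspn (s, "") -> strlen (s)

   anything else is left to the library call.  */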
9991 /* Fold the next_arg or va_start call EXP. Returns true if an error
9992 was produced, false otherwise. This is done so that we don't output
9993 the error or warning more than once. */
9995 bool
9996 fold_builtin_next_arg (tree exp, bool va_start_p)
9998 tree fntype = TREE_TYPE (current_function_decl);
9999 int nargs = call_expr_nargs (exp);
10000 tree arg;
10001 /* There is a good chance the current input_location points inside the
10002 definition of the va_start macro (perhaps on the token for
10003 builtin) in a system header, so warnings will not be emitted.
10004 Use the location in real source code. */
10005 source_location current_location =
10006 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10007 NULL);
10009 if (!stdarg_p (fntype))
10011 error ("%<va_start%> used in function with fixed args");
10012 return true;
10015 if (va_start_p)
10017 if (nargs != 2)
10019 error ("wrong number of arguments to function %<va_start%>");
10020 return true;
10022 arg = CALL_EXPR_ARG (exp, 1);
10024 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10025 when we checked the arguments and if needed issued a warning. */
10026 else
10028 if (nargs == 0)
10030 /* Evidently an out of date version of <stdarg.h>; can't validate
10031 va_start's second argument, but can still work as intended. */
10032 warning_at (current_location,
10033 OPT_Wvarargs,
10034 "%<__builtin_next_arg%> called without an argument");
10035 return true;
10037 else if (nargs > 1)
10039 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10040 return true;
10042 arg = CALL_EXPR_ARG (exp, 0);
10045 if (TREE_CODE (arg) == SSA_NAME)
10046 arg = SSA_NAME_VAR (arg);
10048 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10049 or __builtin_next_arg (0) the first time we see it, after checking
10050 the arguments and if needed issuing a warning. */
10051 if (!integer_zerop (arg))
10053 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10055 /* Strip off all nops for the sake of the comparison. This
10056 is not quite the same as STRIP_NOPS. It does more.
10057 We must also strip off INDIRECT_EXPR for C++ reference
10058 parameters. */
10059 while (CONVERT_EXPR_P (arg)
10060 || TREE_CODE (arg) == INDIRECT_REF)
10061 arg = TREE_OPERAND (arg, 0);
10062 if (arg != last_parm)
10064 /* FIXME: Sometimes the tree optimizers hand us something other
10065 than the last argument even though the user passed the last
10066 argument. We just warn and carry on with it as if it were the
10067 last argument, so wrong code may be generated because of
10068 it. */
10069 warning_at (current_location,
10070 OPT_Wvarargs,
10071 "second parameter of %<va_start%> not last named argument");
10074 /* Undefined by C99 7.15.1.4p4 (va_start):
10075 "If the parameter parmN is declared with the register storage
10076 class, with a function or array type, or with a type that is
10077 not compatible with the type that results after application of
10078 the default argument promotions, the behavior is undefined." */
10080 else if (DECL_REGISTER (arg))
10082 warning_at (current_location,
10083 OPT_Wvarargs,
10084 "undefined behavior when second parameter of "
10085 "%<va_start%> is declared with %<register%> storage");
10088 /* We want to verify the second parameter just once before the tree
10089 optimizers are run and then avoid keeping it in the tree,
10090 as otherwise we could warn even for correct code like:
10091 void foo (int i, ...)
10092 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10093 if (va_start_p)
10094 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10095 else
10096 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10098 return false;
10102 /* Expand a call EXP to __builtin_object_size. */
10104 static rtx
10105 expand_builtin_object_size (tree exp)
10107 tree ost;
10108 int object_size_type;
10109 tree fndecl = get_callee_fndecl (exp);
10111 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10113 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10114 exp, fndecl);
10115 expand_builtin_trap ();
10116 return const0_rtx;
10119 ost = CALL_EXPR_ARG (exp, 1);
10120 STRIP_NOPS (ost);
10122 if (TREE_CODE (ost) != INTEGER_CST
10123 || tree_int_cst_sgn (ost) < 0
10124 || compare_tree_int (ost, 3) > 0)
10126 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10127 exp, fndecl);
10128 expand_builtin_trap ();
10129 return const0_rtx;
10132 object_size_type = tree_to_shwi (ost);
10134 return object_size_type < 2 ? constm1_rtx : const0_rtx;
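/* For illustration: any call whose result could be computed has
   already been folded before expansion, so what reaches this point
   degenerates to the "unknown" value for its type, e.g.

     __builtin_object_size (p, 0) -> (size_t) -1   maximum unknown
     __builtin_object_size (p, 2) -> (size_t) 0    minimum unknown

   matching the constm1_rtx / const0_rtx fallback above.  */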
10137 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10138 FCODE is the BUILT_IN_* to use.
10139 Return NULL_RTX if we failed; the caller should emit a normal call,
10140 otherwise try to get the result in TARGET, if convenient (and in
10141 mode MODE if that's convenient). */
10143 static rtx
10144 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10145 enum built_in_function fcode)
10147 if (!validate_arglist (exp,
10148 POINTER_TYPE,
10149 fcode == BUILT_IN_MEMSET_CHK
10150 ? INTEGER_TYPE : POINTER_TYPE,
10151 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10152 return NULL_RTX;
10154 tree dest = CALL_EXPR_ARG (exp, 0);
10155 tree src = CALL_EXPR_ARG (exp, 1);
10156 tree len = CALL_EXPR_ARG (exp, 2);
10157 tree size = CALL_EXPR_ARG (exp, 3);
10159 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10160 /*str=*/NULL_TREE, size);
10162 if (!tree_fits_uhwi_p (size))
10163 return NULL_RTX;
10165 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10167 /* Avoid transforming the checking call to an ordinary one when
10168 an overflow has been detected or when the call couldn't be
10169 validated because the size is not constant. */
10170 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10171 return NULL_RTX;
10173 tree fn = NULL_TREE;
10174 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10175 mem{cpy,pcpy,move,set} is available. */
10176 switch (fcode)
10178 case BUILT_IN_MEMCPY_CHK:
10179 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10180 break;
10181 case BUILT_IN_MEMPCPY_CHK:
10182 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10183 break;
10184 case BUILT_IN_MEMMOVE_CHK:
10185 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10186 break;
10187 case BUILT_IN_MEMSET_CHK:
10188 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10189 break;
10190 default:
10191 break;
10194 if (! fn)
10195 return NULL_RTX;
10197 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10198 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10199 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10200 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10202 else if (fcode == BUILT_IN_MEMSET_CHK)
10203 return NULL_RTX;
10204 else
10206 unsigned int dest_align = get_pointer_alignment (dest);
10208 /* If DEST is not a pointer type, call the normal function. */
10209 if (dest_align == 0)
10210 return NULL_RTX;
10212 /* If SRC and DEST are the same (and not volatile), do nothing. */
10213 if (operand_equal_p (src, dest, 0))
10215 tree expr;
10217 if (fcode != BUILT_IN_MEMPCPY_CHK)
10219 /* Evaluate and ignore LEN in case it has side-effects. */
10220 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10221 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10224 expr = fold_build_pointer_plus (dest, len);
10225 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10228 /* __memmove_chk special case. */
10229 if (fcode == BUILT_IN_MEMMOVE_CHK)
10231 unsigned int src_align = get_pointer_alignment (src);
10233 if (src_align == 0)
10234 return NULL_RTX;
10236 /* If src is categorized for a readonly section we can use
10237 normal __memcpy_chk. */
10238 if (readonly_data_expr (src))
10240 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10241 if (!fn)
10242 return NULL_RTX;
10243 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10244 dest, src, len, size);
10245 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10246 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10247 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10250 return NULL_RTX;
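/* For illustration, a sketch of the transformation above: given

     char buf[16];
     __builtin___memcpy_chk (buf, src, 8,
                             __builtin_object_size (buf, 0));

   the constant length 8 fits the 16-byte destination, so the call is
   rewritten to plain memcpy (buf, src, 8).  Had the length exceeded
   the object size, NULL_RTX would be returned and the checking call
   emitted, so the runtime diagnostic still fires.  */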
10254 /* Emit warning if a buffer overflow is detected at compile time. */
10256 static void
10257 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10259 /* The source string. */
10260 tree srcstr = NULL_TREE;
10261 /* The size of the destination object. */
10262 tree objsize = NULL_TREE;
10263 /* The string that is being concatenated with (as in __strcat_chk)
10264 or null if it isn't. */
10265 tree catstr = NULL_TREE;
10266 /* The maximum length of the source sequence in a bounded operation
10267 (such as __strncat_chk) or null if the operation isn't bounded
10268 (such as __strcat_chk). */
10269 tree maxread = NULL_TREE;
10270 /* The exact size of the access (such as in __strncpy_chk). */
10271 tree size = NULL_TREE;
10273 switch (fcode)
10275 case BUILT_IN_STRCPY_CHK:
10276 case BUILT_IN_STPCPY_CHK:
10277 srcstr = CALL_EXPR_ARG (exp, 1);
10278 objsize = CALL_EXPR_ARG (exp, 2);
10279 break;
10281 case BUILT_IN_STRCAT_CHK:
10282 /* For __strcat_chk the warning will be emitted only if overflowing
10283 by at least strlen (dest) + 1 bytes. */
10284 catstr = CALL_EXPR_ARG (exp, 0);
10285 srcstr = CALL_EXPR_ARG (exp, 1);
10286 objsize = CALL_EXPR_ARG (exp, 2);
10287 break;
10289 case BUILT_IN_STRNCAT_CHK:
10290 catstr = CALL_EXPR_ARG (exp, 0);
10291 srcstr = CALL_EXPR_ARG (exp, 1);
10292 maxread = CALL_EXPR_ARG (exp, 2);
10293 objsize = CALL_EXPR_ARG (exp, 3);
10294 break;
10296 case BUILT_IN_STRNCPY_CHK:
10297 case BUILT_IN_STPNCPY_CHK:
10298 srcstr = CALL_EXPR_ARG (exp, 1);
10299 size = CALL_EXPR_ARG (exp, 2);
10300 objsize = CALL_EXPR_ARG (exp, 3);
10301 break;
10303 case BUILT_IN_SNPRINTF_CHK:
10304 case BUILT_IN_VSNPRINTF_CHK:
10305 maxread = CALL_EXPR_ARG (exp, 1);
10306 objsize = CALL_EXPR_ARG (exp, 3);
10307 break;
10308 default:
10309 gcc_unreachable ();
10312 if (catstr && maxread)
10314 /* Check __strncat_chk. There is no way to determine the length
10315 of the string to which the source string is being appended so
10316 just warn when the length of the source string is not known. */
10317 check_strncat_sizes (exp, objsize);
10318 return;
10321 /* The destination argument is the first one for all built-ins above. */
10322 tree dst = CALL_EXPR_ARG (exp, 0);
10324 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10327 /* Emit warning if a buffer overflow is detected at compile time
10328 in __sprintf_chk/__vsprintf_chk calls. */
10330 static void
10331 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10333 tree size, len, fmt;
10334 const char *fmt_str;
10335 int nargs = call_expr_nargs (exp);
10337 /* Verify the required arguments in the original call. */
10339 if (nargs < 4)
10340 return;
10341 size = CALL_EXPR_ARG (exp, 2);
10342 fmt = CALL_EXPR_ARG (exp, 3);
10344 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10345 return;
10347 /* Check whether the format is a literal string constant. */
10348 fmt_str = c_getstr (fmt);
10349 if (fmt_str == NULL)
10350 return;
10352 if (!init_target_chars ())
10353 return;
10355 /* If the format doesn't contain % args or %%, we know its size. */
10356 if (strchr (fmt_str, target_percent) == 0)
10357 len = build_int_cstu (size_type_node, strlen (fmt_str));
10358 /* If the format is "%s" and first ... argument is a string literal,
10359 we know it too. */
10360 else if (fcode == BUILT_IN_SPRINTF_CHK
10361 && strcmp (fmt_str, target_percent_s) == 0)
10363 tree arg;
10365 if (nargs < 5)
10366 return;
10367 arg = CALL_EXPR_ARG (exp, 4);
10368 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10369 return;
10371 len = c_strlen (arg, 1);
10372 if (!len || ! tree_fits_uhwi_p (len))
10373 return;
10375 else
10376 return;
10378 /* Add one for the terminating nul. */
10379 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10381 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10382 /*maxread=*/NULL_TREE, len, size);
10385 /* Emit warning if a free is called with address of a variable. */
10387 static void
10388 maybe_emit_free_warning (tree exp)
10390 tree arg = CALL_EXPR_ARG (exp, 0);
10392 STRIP_NOPS (arg);
10393 if (TREE_CODE (arg) != ADDR_EXPR)
10394 return;
10396 arg = get_base_address (TREE_OPERAND (arg, 0));
10397 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10398 return;
10400 if (SSA_VAR_P (arg))
10401 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10402 "%Kattempt to free a non-heap object %qD", exp, arg);
10403 else
10404 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10405 "%Kattempt to free a non-heap object", exp);
10408 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10409 if possible. */
10411 static tree
10412 fold_builtin_object_size (tree ptr, tree ost)
10414 unsigned HOST_WIDE_INT bytes;
10415 int object_size_type;
10417 if (!validate_arg (ptr, POINTER_TYPE)
10418 || !validate_arg (ost, INTEGER_TYPE))
10419 return NULL_TREE;
10421 STRIP_NOPS (ost);
10423 if (TREE_CODE (ost) != INTEGER_CST
10424 || tree_int_cst_sgn (ost) < 0
10425 || compare_tree_int (ost, 3) > 0)
10426 return NULL_TREE;
10428 object_size_type = tree_to_shwi (ost);
10430 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10431 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10432 and (size_t) 0 for types 2 and 3. */
10433 if (TREE_SIDE_EFFECTS (ptr))
10434 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10436 if (TREE_CODE (ptr) == ADDR_EXPR)
10438 compute_builtin_object_size (ptr, object_size_type, &bytes);
10439 if (wi::fits_to_tree_p (bytes, size_type_node))
10440 return build_int_cstu (size_type_node, bytes);
10442 else if (TREE_CODE (ptr) == SSA_NAME)
10444 /* If object size is not known yet, delay folding until
10445 later. Maybe subsequent passes will help determining
10446 it. */
10447 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10448 && wi::fits_to_tree_p (bytes, size_type_node))
10449 return build_int_cstu (size_type_node, bytes);
10452 return NULL_TREE;
10455 /* Builtins with folding operations that operate on "..." arguments
10456 need special handling; we need to store the arguments in a convenient
10457 data structure before attempting any folding. Fortunately there are
10458 only a few builtins that fall into this category. FNDECL is the
10459 function, EXP is the CALL_EXPR for the call. */
10461 static tree
10462 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10464 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10465 tree ret = NULL_TREE;
10467 switch (fcode)
10469 case BUILT_IN_FPCLASSIFY:
10470 ret = fold_builtin_fpclassify (loc, args, nargs);
10471 break;
10473 default:
10474 break;
10476 if (ret)
10478 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10479 SET_EXPR_LOCATION (ret, loc);
10480 TREE_NO_WARNING (ret) = 1;
10481 return ret;
10483 return NULL_TREE;
10486 /* Initialize format string characters in the target charset. */
10488 bool
10489 init_target_chars (void)
10491 static bool init;
10492 if (!init)
10494 target_newline = lang_hooks.to_target_charset ('\n');
10495 target_percent = lang_hooks.to_target_charset ('%');
10496 target_c = lang_hooks.to_target_charset ('c');
10497 target_s = lang_hooks.to_target_charset ('s');
10498 if (target_newline == 0 || target_percent == 0 || target_c == 0
10499 || target_s == 0)
10500 return false;
10502 target_percent_c[0] = target_percent;
10503 target_percent_c[1] = target_c;
10504 target_percent_c[2] = '\0';
10506 target_percent_s[0] = target_percent;
10507 target_percent_s[1] = target_s;
10508 target_percent_s[2] = '\0';
10510 target_percent_s_newline[0] = target_percent;
10511 target_percent_s_newline[1] = target_s;
10512 target_percent_s_newline[2] = target_newline;
10513 target_percent_s_newline[3] = '\0';
10515 init = true;
10517 return true;
10520 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10521 and no overflow/underflow occurred. INEXACT is true if M was not
10522 exactly calculated. TYPE is the tree type for the result. This
10523 function assumes that you cleared the MPFR flags and then
10524 calculated M to see if anything subsequently set a flag prior to
10525 entering this function. Return NULL_TREE if any checks fail. */
10527 static tree
10528 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10530 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10531 overflow/underflow occurred. If -frounding-math, proceed iff the
10532 result of calling FUNC was exact. */
10533 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10534 && (!flag_rounding_math || !inexact))
10536 REAL_VALUE_TYPE rr;
10538 real_from_mpfr (&rr, m, type, GMP_RNDN);
10539 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10540 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10541 but the mpfr_t is not, then we underflowed in the
10542 conversion. */
10543 if (real_isfinite (&rr)
10544 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10546 REAL_VALUE_TYPE rmode;
10548 real_convert (&rmode, TYPE_MODE (type), &rr);
10549 /* Proceed iff the specified mode can hold the value. */
10550 if (real_identical (&rmode, &rr))
10551 return build_real (type, rmode);
10554 return NULL_TREE;
10557 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10558 number and no overflow/underflow occurred. INEXACT is true if M
10559 was not exactly calculated. TYPE is the tree type for the result.
10560 This function assumes that you cleared the MPFR flags and then
10561 calculated M to see if anything subsequently set a flag prior to
10562 entering this function. Return NULL_TREE if any checks fail; if
10563 FORCE_CONVERT is true, the checks are bypassed. */
10565 static tree
10566 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10568 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10569 overflow/underflow occurred. If -frounding-math, proceed iff the
10570 result of calling FUNC was exact. */
10571 if (force_convert
10572 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10573 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10574 && (!flag_rounding_math || !inexact)))
10576 REAL_VALUE_TYPE re, im;
10578 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10579 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10580 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10581 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10582 but the mpfr_t is not, then we underflowed in the
10583 conversion. */
10584 if (force_convert
10585 || (real_isfinite (&re) && real_isfinite (&im)
10586 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10587 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10589 REAL_VALUE_TYPE re_mode, im_mode;
10591 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10592 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10593 /* Proceed iff the specified mode can hold the value. */
10594 if (force_convert
10595 || (real_identical (&re_mode, &re)
10596 && real_identical (&im_mode, &im)))
10597 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10598 build_real (TREE_TYPE (type), im_mode));
10601 return NULL_TREE;
10604 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10605 the pointer *(ARG_QUO) and return the result. The type is taken
10606 from the type of ARG0 and is used for setting the precision of the
10607 calculation and results. */
10609 static tree
10610 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10612 tree const type = TREE_TYPE (arg0);
10613 tree result = NULL_TREE;
10615 STRIP_NOPS (arg0);
10616 STRIP_NOPS (arg1);
10618 /* To proceed, MPFR must exactly represent the target floating point
10619 format, which only happens when the target base equals two. */
10620 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10621 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10622 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10624 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10625 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10627 if (real_isfinite (ra0) && real_isfinite (ra1))
10629 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10630 const int prec = fmt->p;
10631 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10632 tree result_rem;
10633 long integer_quo;
10634 mpfr_t m0, m1;
10636 mpfr_inits2 (prec, m0, m1, NULL);
10637 mpfr_from_real (m0, ra0, GMP_RNDN);
10638 mpfr_from_real (m1, ra1, GMP_RNDN);
10639 mpfr_clear_flags ();
10640 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10641 /* Remquo is independent of the rounding mode, so pass
10642 inexact=0 to do_mpfr_ckconv(). */
10643 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10644 mpfr_clears (m0, m1, NULL);
10645 if (result_rem)
10647 /* MPFR calculates quo in the host's long so it may
10648 return more bits in quo than the target int can hold
10649 if sizeof(host long) > sizeof(target int). This can
10650 happen even for native compilers in LP64 mode. In
10651 these cases, modulo the quo value with the largest
10652 number that the target int can hold while leaving one
10653 bit for the sign. */
10654 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10655 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10657 /* Dereference the quo pointer argument. */
10658 arg_quo = build_fold_indirect_ref (arg_quo);
10659 /* Proceed iff a valid pointer type was passed in. */
10660 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10662 /* Set the value. */
10663 tree result_quo
10664 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10665 build_int_cst (TREE_TYPE (arg_quo),
10666 integer_quo));
10667 TREE_SIDE_EFFECTS (result_quo) = 1;
10668 /* Combine the quo assignment with the rem. */
10669 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10670 result_quo, result_rem));
10675 return result;
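
/* A worked example of the folding above (illustrative only):

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   folds at compile time to r == -1.0 and q == 2: remquo rounds the
   quotient 5.0/3.0 to the nearest integer 2 and returns the remainder
   5.0 - 2*3.0 == -1.0, matching the C99 remquo() semantics.  The tree
   built above is a COMPOUND_EXPR so the store to *quo is kept along
   with the remainder value.  */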

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
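
/* A worked example (illustrative only): for

     int sg;
     double l = __builtin_lgamma_r (-0.5, &sg);

   gamma(-0.5) == -2*sqrt(pi) is negative, so SG is set to -1 and L to
   log(2*sqrt(pi)), roughly 1.2655.  Zero and negative integer
   arguments are rejected above because lgamma has poles at those
   points.  */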

/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
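
/* Illustration (the exact caller lives elsewhere in the tree): the
   constant folder for __builtin_cpow passes mpc_pow as FUNC, so a call
   with two COMPLEX_CST arguments is evaluated here at the target
   precision and, when do_mpc_ckconv accepts the result, replaced
   outright by a COMPLEX_CST.  */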

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
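
/* For example (illustrative only): a statement like

     strlen ("hello");

   whose value is unused folds to the constant 5 with IGNORE set;
   copying LOC onto the expansion keeps later diagnostics such as
   maybe_emit_chk_warning pointing at the original call site rather
   than at a location-less replacement tree.  */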

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
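
/* A user typically reaches this through an asm rename (illustrative):

     extern int ffs (int) __asm__ ("my_ffs");

   After this runs, calls to the ffs builtin assemble to "my_ffs", and
   on targets where int is narrower than a word the "ffs" libfunc and
   the ffs_optab entry are renamed as well, so library fallbacks stay
   consistent with the user's name.  */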

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
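
/* For instance (illustrative): __builtin_constant_p (x) folds to the
   constant 0 or 1, and __builtin_return_address (0) is a single
   register or stack-slot read, so calls like these add essentially no
   code; that is the notion of "simple" this predicate captures.  */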

/* Return true if DECL is a builtin that is not expensive, i.e. it is
   most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
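
/* For instance (illustrative): __builtin_popcount typically expands to
   a single population-count instruction or a short libgcc sequence,
   and __builtin_isnan to a compare or two, so both count as
   inexpensive here even though they are not "simple" in the sense
   above.  */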

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
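
/* Illustrative use: when folding a call like memchr (s, c, n) with
   constant C, the caller uses target_char_cst_p to obtain C as a host
   char.  For C == 'a' on the usual 8-bit-char targets this stores 'a'
   in *P and returns true; in a cross compiler whose target char width
   differs from the host's it refuses, and the fold is skipped.  */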

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
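
/* On a typical 64-bit target, for example, ptrdiff_type_node is a
   signed 64-bit type, so this returns PTRDIFF_MAX, i.e. 2**63 - 1;
   callers treat size arguments larger than this bound as invalid.  */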