[official-gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
97 #include "builtins.def"
 100 /* Set up an array of builtin_info_type and make sure each element's decl
 101 is initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_cexpi (tree, rtx);
123 static rtx expand_builtin_int_roundingfn (tree, rtx);
124 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
125 static rtx expand_builtin_next_arg (void);
126 static rtx expand_builtin_va_start (tree);
127 static rtx expand_builtin_va_end (tree);
128 static rtx expand_builtin_va_copy (tree);
129 static rtx inline_expand_builtin_bytecmp (tree, rtx);
130 static rtx expand_builtin_strcmp (tree, rtx);
131 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
132 static rtx expand_builtin_memcpy (tree, rtx);
133 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
134 rtx target, tree exp,
135 memop_ret retmode,
136 bool might_overlap);
137 static rtx expand_builtin_memmove (tree, rtx);
138 static rtx expand_builtin_mempcpy (tree, rtx);
139 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
140 static rtx expand_builtin_strcpy (tree, rtx);
141 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
142 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
143 static rtx expand_builtin_strncpy (tree, rtx);
144 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
145 static rtx expand_builtin_bzero (tree);
146 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
148 static rtx expand_builtin_alloca (tree);
149 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static tree stabilize_va_list_loc (location_t, tree, int);
152 static rtx expand_builtin_expect (tree, rtx);
153 static rtx expand_builtin_expect_with_probability (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_classify_type (tree);
156 static tree fold_builtin_strlen (location_t, tree, tree, tree);
157 static tree fold_builtin_inf (location_t, tree, int);
158 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
159 static bool validate_arg (const_tree, enum tree_code code);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_isascii (location_t, tree);
164 static tree fold_builtin_toascii (location_t, tree);
165 static tree fold_builtin_isdigit (location_t, tree);
166 static tree fold_builtin_fabs (location_t, tree, tree);
167 static tree fold_builtin_abs (location_t, tree, tree);
168 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
169 enum tree_code);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static tree fold_builtin_object_size (tree, tree);
183 unsigned HOST_WIDE_INT target_newline;
184 unsigned HOST_WIDE_INT target_percent;
185 static unsigned HOST_WIDE_INT target_c;
186 static unsigned HOST_WIDE_INT target_s;
187 char target_percent_c[3];
188 char target_percent_s[3];
189 char target_percent_s_newline[4];
190 static tree do_mpfr_remquo (tree, tree, tree);
191 static tree do_mpfr_lgamma_r (tree, tree, tree);
192 static void expand_builtin_sync_synchronize (void);
 194 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
196 static bool
197 is_builtin_name (const char *name)
199 return (startswith (name, "__builtin_")
200 || startswith (name, "__sync_")
201 || startswith (name, "__atomic_"));
204 /* Return true if NODE should be considered for inline expansion regardless
205 of the optimization level. This means whenever a function is invoked with
206 its "internal" name, which normally contains the prefix "__builtin". */
208 bool
209 called_as_built_in (tree node)
211 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
212 we want the name used to call the function, not the name it
213 will have. */
214 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
215 return is_builtin_name (name);
218 /* Compute values M and N such that M divides (address of EXP - N) and such
 219 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 220 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 221 *ALIGNP and any bit-offset to *BITPOSP.
223 Note that the address (and thus the alignment) computed here is based
224 on the address to which a symbol resolves, whereas DECL_ALIGN is based
225 on the address at which an object is actually located. These two
226 addresses are not always the same. For example, on ARM targets,
227 the address &foo of a Thumb function foo() has the lowest bit set,
228 whereas foo() itself starts on an even address.
230 If ADDR_P is true we are taking the address of the memory reference EXP
231 and thus cannot rely on the access taking place. */
233 static bool
234 get_object_alignment_2 (tree exp, unsigned int *alignp,
235 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
237 poly_int64 bitsize, bitpos;
238 tree offset;
239 machine_mode mode;
240 int unsignedp, reversep, volatilep;
241 unsigned int align = BITS_PER_UNIT;
242 bool known_alignment = false;
244 /* Get the innermost object and the constant (bitpos) and possibly
245 variable (offset) offset of the access. */
246 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
247 &unsignedp, &reversep, &volatilep);
249 /* Extract alignment information from the innermost object and
250 possibly adjust bitpos and offset. */
251 if (TREE_CODE (exp) == FUNCTION_DECL)
253 /* Function addresses can encode extra information besides their
254 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
255 allows the low bit to be used as a virtual bit, we know
256 that the address itself must be at least 2-byte aligned. */
257 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
258 align = 2 * BITS_PER_UNIT;
260 else if (TREE_CODE (exp) == LABEL_DECL)
262 else if (TREE_CODE (exp) == CONST_DECL)
264 /* The alignment of a CONST_DECL is determined by its initializer. */
265 exp = DECL_INITIAL (exp);
266 align = TYPE_ALIGN (TREE_TYPE (exp));
267 if (CONSTANT_CLASS_P (exp))
268 align = targetm.constant_alignment (exp, align);
270 known_alignment = true;
272 else if (DECL_P (exp))
274 align = DECL_ALIGN (exp);
275 known_alignment = true;
277 else if (TREE_CODE (exp) == INDIRECT_REF
278 || TREE_CODE (exp) == MEM_REF
279 || TREE_CODE (exp) == TARGET_MEM_REF)
281 tree addr = TREE_OPERAND (exp, 0);
282 unsigned ptr_align;
283 unsigned HOST_WIDE_INT ptr_bitpos;
284 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
 286 /* If the address is explicitly aligned, handle that. */
287 if (TREE_CODE (addr) == BIT_AND_EXPR
288 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
290 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
291 ptr_bitmask *= BITS_PER_UNIT;
292 align = least_bit_hwi (ptr_bitmask);
293 addr = TREE_OPERAND (addr, 0);
296 known_alignment
297 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
298 align = MAX (ptr_align, align);
300 /* Re-apply explicit alignment to the bitpos. */
301 ptr_bitpos &= ptr_bitmask;
303 /* The alignment of the pointer operand in a TARGET_MEM_REF
304 has to take the variable offset parts into account. */
305 if (TREE_CODE (exp) == TARGET_MEM_REF)
307 if (TMR_INDEX (exp))
309 unsigned HOST_WIDE_INT step = 1;
310 if (TMR_STEP (exp))
311 step = TREE_INT_CST_LOW (TMR_STEP (exp));
312 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
314 if (TMR_INDEX2 (exp))
315 align = BITS_PER_UNIT;
316 known_alignment = false;
319 /* When EXP is an actual memory reference then we can use
320 TYPE_ALIGN of a pointer indirection to derive alignment.
321 Do so only if get_pointer_alignment_1 did not reveal absolute
322 alignment knowledge and if using that alignment would
323 improve the situation. */
324 unsigned int talign;
325 if (!addr_p && !known_alignment
326 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
327 && talign > align)
328 align = talign;
329 else
331 /* Else adjust bitpos accordingly. */
332 bitpos += ptr_bitpos;
333 if (TREE_CODE (exp) == MEM_REF
334 || TREE_CODE (exp) == TARGET_MEM_REF)
335 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
338 else if (TREE_CODE (exp) == STRING_CST)
340 /* STRING_CST are the only constant objects we allow to be not
341 wrapped inside a CONST_DECL. */
342 align = TYPE_ALIGN (TREE_TYPE (exp));
343 if (CONSTANT_CLASS_P (exp))
344 align = targetm.constant_alignment (exp, align);
346 known_alignment = true;
349 /* If there is a non-constant offset part extract the maximum
350 alignment that can prevail. */
351 if (offset)
353 unsigned int trailing_zeros = tree_ctz (offset);
354 if (trailing_zeros < HOST_BITS_PER_INT)
356 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
357 if (inner)
358 align = MIN (align, inner);
362 /* Account for the alignment of runtime coefficients, so that the constant
363 bitpos is guaranteed to be accurate. */
364 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
365 if (alt_align != 0 && alt_align < align)
367 align = alt_align;
368 known_alignment = false;
371 *alignp = align;
372 *bitposp = bitpos.coeffs[0] & (align - 1);
373 return known_alignment;
376 /* For a memory reference expression EXP compute values M and N such that M
377 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 378 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 379 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
381 bool
382 get_object_alignment_1 (tree exp, unsigned int *alignp,
383 unsigned HOST_WIDE_INT *bitposp)
385 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
386 with it. */
387 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
388 exp = TREE_OPERAND (exp, 0);
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
392 /* Return the alignment in bits of EXP, an object. */
394 unsigned int
395 get_object_alignment (tree exp)
397 unsigned HOST_WIDE_INT bitpos = 0;
398 unsigned int align;
400 get_object_alignment_1 (exp, &align, &bitpos);
402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
405 if (bitpos != 0)
406 align = least_bit_hwi (bitpos);
407 return align;
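/* For illustration (a sketch, not compiled; the struct below is a
   hypothetical example type): suppose a reference to a field at byte
   offset 4 inside a 16-byte aligned object.  get_object_alignment_1
   then reports *ALIGNP == 128 and *BITPOSP == 32 (both in bits), i.e.
   ptr & (align - 1) == bitpos, and get_object_alignment collapses the
   pair into least_bit_hwi (32) == 32 bits, so the field is only known
   to be 4-byte aligned.  */
#if 0
struct align_example { char pad[4]; int x; };
static struct align_example buf __attribute__ ((aligned (16)));
/* For buf.x (equivalently the pointer &buf.x): align = 128 bits,
   bitpos = 32 bits, collapsed alignment = 32 bits (4 bytes).  */
#endif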
410 /* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
 412 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
413 the results are just a conservative approximation.
415 If EXP is not a pointer, false is returned too. */
417 bool
418 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INT *bitposp)
421 STRIP_NOPS (exp);
423 if (TREE_CODE (exp) == ADDR_EXPR)
424 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 alignp, bitposp, true);
426 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 unsigned int align;
429 unsigned HOST_WIDE_INT bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434 else
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 if (trailing_zeros < HOST_BITS_PER_INT)
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 if (inner)
441 align = MIN (align, inner);
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 unsigned int ptr_align, ptr_misalign;
452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 *bitposp = ptr_misalign * BITS_PER_UNIT;
457 *alignp = ptr_align * BITS_PER_UNIT;
458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT - 1);
462 /* We cannot really tell whether this result is an approximation. */
463 return false;
465 else
467 *bitposp = 0;
468 *alignp = BITS_PER_UNIT;
469 return false;
472 else if (TREE_CODE (exp) == INTEGER_CST)
474 *alignp = BIGGEST_ALIGNMENT;
475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
480 *bitposp = 0;
481 *alignp = BITS_PER_UNIT;
482 return false;
485 /* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
487 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
492 unsigned int
493 get_pointer_alignment (tree exp)
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
498 get_pointer_alignment_1 (exp, &align, &bitpos);
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
503 if (bitpos != 0)
504 align = least_bit_hwi (bitpos);
506 return align;
509 /* Return the number of leading non-zero elements in the sequence
510 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
513 unsigned
514 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
516 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
518 unsigned n;
520 if (eltsize == 1)
522 /* Optimize the common case of plain char. */
523 for (n = 0; n < maxelts; n++)
525 const char *elt = (const char*) ptr + n;
526 if (!*elt)
527 break;
530 else
532 for (n = 0; n < maxelts; n++)
534 const char *elt = (const char*) ptr + n * eltsize;
535 if (!memcmp (elt, "\0\0\0\0", eltsize))
536 break;
539 return n;
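/* For illustration (hypothetical data, not compiled): with ELTSIZE == 4
   the loop above compares one wide element at a time, so for the byte
   image of L"hi" below string_length (wide_hi, 4, 3) returns 2, the
   number of leading non-zero elements.  With ELTSIZE == 1 the function
   degenerates into a plain strnlen-style scan.  */
#if 0
static const unsigned char wide_hi[12]
  = { 'h', 0, 0, 0,  'i', 0, 0, 0,  0, 0, 0, 0 };  /* L"hi", little-endian */
#endif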
542 /* Compute the length of a null-terminated character string or wide
543 character string handling character sizes of 1, 2, and 4 bytes.
544 TREE_STRING_LENGTH is not the right way because it evaluates to
545 the size of the character array in bytes (as opposed to characters)
546 and because it can contain a zero byte in the middle.
548 ONLY_VALUE should be nonzero if the result is not going to be emitted
549 into the instruction stream and zero if it is going to be expanded.
550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551 is returned, otherwise NULL, since
552 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
559 Additional information about the string accessed may be recorded
560 in DATA. For example, if ARG references an unterminated string,
561 then the declaration will be stored in the DECL field. If the
562 length of the unterminated string can be determined, it'll be
563 stored in the LEN field. Note this length could well be different
564 than what a C strlen call would return.
566 ELTSIZE is 1 for normal single byte character strings, and 2 or
 567 4 for wide character strings. ELTSIZE is by default 1.
569 The value returned is of type `ssizetype'. */
571 tree
572 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
574 /* If we were not passed a DATA pointer, then get one to a local
575 structure. That avoids having to check DATA for NULL before
576 each time we want to use it. */
577 c_strlen_data local_strlen_data = { };
578 if (!data)
579 data = &local_strlen_data;
581 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
583 tree src = STRIP_NOPS (arg);
584 if (TREE_CODE (src) == COND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
587 tree len1, len2;
589 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
590 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
591 if (tree_int_cst_equal (len1, len2))
592 return len1;
595 if (TREE_CODE (src) == COMPOUND_EXPR
596 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
597 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
599 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
601 /* Offset from the beginning of the string in bytes. */
602 tree byteoff;
603 tree memsize;
604 tree decl;
605 src = string_constant (src, &byteoff, &memsize, &decl);
606 if (src == 0)
607 return NULL_TREE;
609 /* Determine the size of the string element. */
610 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
611 return NULL_TREE;
613 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
614 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
615 in case the latter is less than the size of the array, such as when
616 SRC refers to a short string literal used to initialize a large array.
617 In that case, the elements of the array after the terminating NUL are
618 all NUL. */
619 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
620 strelts = strelts / eltsize;
622 if (!tree_fits_uhwi_p (memsize))
623 return NULL_TREE;
625 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
627 /* PTR can point to the byte representation of any string type, including
628 char* and wchar_t*. */
629 const char *ptr = TREE_STRING_POINTER (src);
631 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
633 /* The code below works only for single byte character types. */
634 if (eltsize != 1)
635 return NULL_TREE;
637 /* If the string has an internal NUL character followed by any
638 non-NUL characters (e.g., "foo\0bar"), we can't compute
639 the offset to the following NUL if we don't know where to
640 start searching for it. */
641 unsigned len = string_length (ptr, eltsize, strelts);
643 /* Return when an embedded null character is found or none at all.
644 In the latter case, set the DECL/LEN field in the DATA structure
645 so that callers may examine them. */
646 if (len + 1 < strelts)
647 return NULL_TREE;
648 else if (len >= maxelts)
650 data->decl = decl;
651 data->off = byteoff;
652 data->minlen = ssize_int (len);
653 return NULL_TREE;
656 /* For empty strings the result should be zero. */
657 if (len == 0)
658 return ssize_int (0);
660 /* We don't know the starting offset, but we do know that the string
661 has no internal zero bytes. If the offset falls within the bounds
662 of the string subtract the offset from the length of the string,
663 and return that. Otherwise the length is zero. Take care to
664 use SAVE_EXPR in case the OFFSET has side-effects. */
665 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
666 : byteoff;
667 offsave = fold_convert_loc (loc, sizetype, offsave);
668 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
669 size_int (len));
670 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
671 offsave);
672 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
673 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
674 build_zero_cst (ssizetype));
677 /* Offset from the beginning of the string in elements. */
678 HOST_WIDE_INT eltoff;
680 /* We have a known offset into the string. Start searching there for
681 a null character if we can represent it as a single HOST_WIDE_INT. */
682 if (byteoff == 0)
683 eltoff = 0;
684 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
685 eltoff = -1;
686 else
687 eltoff = tree_to_uhwi (byteoff) / eltsize;
689 /* If the offset is known to be out of bounds, warn, and call strlen at
690 runtime. */
691 if (eltoff < 0 || eltoff >= maxelts)
693 /* Suppress multiple warnings for propagated constant strings. */
694 if (only_value != 2
695 && !warning_suppressed_p (arg, OPT_Warray_bounds)
696 && warning_at (loc, OPT_Warray_bounds,
697 "offset %qwi outside bounds of constant string",
698 eltoff))
700 if (decl)
701 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
702 suppress_warning (arg, OPT_Warray_bounds);
704 return NULL_TREE;
707 /* If eltoff is larger than strelts but less than maxelts the
708 string length is zero, since the excess memory will be zero. */
709 if (eltoff > strelts)
710 return ssize_int (0);
712 /* Use strlen to search for the first zero byte. Since any strings
713 constructed with build_string will have nulls appended, we win even
714 if we get handed something like (char[4])"abcd".
716 Since ELTOFF is our starting index into the string, no further
717 calculation is needed. */
718 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
719 strelts - eltoff);
721 /* Don't know what to return if there was no zero termination.
722 Ideally this would turn into a gcc_checking_assert over time.
723 Set DECL/LEN so callers can examine them. */
724 if (len >= maxelts - eltoff)
726 data->decl = decl;
727 data->off = byteoff;
728 data->minlen = ssize_int (len);
729 return NULL_TREE;
732 return ssize_int (len);
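/* For illustration (a sketch under the contract documented above, with
   ELTSIZE == 1): for an ARG that resolves to the string constant "foobar"
   c_strlen folds to ssize_int (6); for "foo\0bar" with a known offset of 4
   it folds to ssize_int (3); for "foo\0bar" with a non-constant offset it
   returns NULL_TREE, because the embedded NUL makes the result depend on
   where the scan starts; and for an unterminated character array it returns
   NULL_TREE after filling in DATA->decl and DATA->minlen.  The helpers in
   the sketch below are hypothetical placeholders.  */
#if 0
  c_strlen_data lendata = { };
  tree len = c_strlen (arg, /*only_value=*/1, &lendata, /*eltsize=*/1);
  if (len)
    use_constant_length (len);			/* e.g. ssize_int (6) */
  else if (lendata.decl)
    warn_unterminated (lendata.decl, lendata.minlen);
#endif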
735 /* Return a constant integer corresponding to target reading
736 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
737 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
738 are assumed to be zero, otherwise it reads as many characters
739 as needed. */
 741 rtx
 742 c_readstr (const char *str, scalar_int_mode mode,
743 bool null_terminated_p/*=true*/)
745 HOST_WIDE_INT ch;
746 unsigned int i, j;
747 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
749 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
750 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
751 / HOST_BITS_PER_WIDE_INT;
753 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
754 for (i = 0; i < len; i++)
755 tmp[i] = 0;
757 ch = 1;
758 for (i = 0; i < GET_MODE_SIZE (mode); i++)
760 j = i;
761 if (WORDS_BIG_ENDIAN)
762 j = GET_MODE_SIZE (mode) - i - 1;
763 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
764 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
765 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
766 j *= BITS_PER_UNIT;
768 if (ch || !null_terminated_p)
769 ch = (unsigned char) str[i];
770 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
773 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
774 return immed_wide_int_const (c, mode);
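/* For illustration: with STR == "abcd" and a 32-bit integer mode the loop
   above places byte I at bit position I*8 on a little-endian target,
   yielding the constant 0x64636261, and reverses the bytes on a big-endian
   target, yielding 0x61626364.  When NULL_TERMINATED_P, bytes past the
   first '\0' read as zero, so "ab" in the same mode gives 0x00006261 on a
   little-endian target.  */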
777 /* Cast a target constant CST to target CHAR and if that value fits into
778 host char type, return zero and put that value into variable pointed to by
779 P. */
781 static int
782 target_char_cast (tree cst, char *p)
784 unsigned HOST_WIDE_INT val, hostval;
786 if (TREE_CODE (cst) != INTEGER_CST
787 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
788 return 1;
790 /* Do not care if it fits or not right here. */
791 val = TREE_INT_CST_LOW (cst);
793 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
794 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
796 hostval = val;
797 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
798 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
800 if (val != hostval)
801 return 1;
803 *p = hostval;
804 return 0;
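/* For illustration: on a target with 8-bit chars the constant 65 stores
   'A' (0x41) in *P and target_char_cast returns 0; a wider constant such
   as 0x141 is first reduced to the target character value 0x41 and is also
   accepted.  A nonzero return only happens when the target character value
   itself cannot be represented in a host char, e.g. a 16-bit target char
   holding 0x141 on a host with 8-bit chars.  */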
807 /* Similar to save_expr, but assumes that arbitrary code is not executed
808 in between the multiple evaluations. In particular, we assume that a
809 non-addressable local variable will not be modified. */
811 static tree
812 builtin_save_expr (tree exp)
814 if (TREE_CODE (exp) == SSA_NAME
815 || (TREE_ADDRESSABLE (exp) == 0
816 && (TREE_CODE (exp) == PARM_DECL
817 || (VAR_P (exp) && !TREE_STATIC (exp)))))
818 return exp;
820 return save_expr (exp);
823 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
824 times to get the address of either a higher stack frame, or a return
825 address located within it (depending on FNDECL_CODE). */
827 static rtx
828 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
830 int i;
831 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
832 if (tem == NULL_RTX)
834 /* For a zero count with __builtin_return_address, we don't care what
835 frame address we return, because target-specific definitions will
836 override us. Therefore frame pointer elimination is OK, and using
837 the soft frame pointer is OK.
839 For a nonzero count, or a zero count with __builtin_frame_address,
840 we require a stable offset from the current frame pointer to the
841 previous one, so we must use the hard frame pointer, and
842 we must disable frame pointer elimination. */
843 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
844 tem = frame_pointer_rtx;
845 else
847 tem = hard_frame_pointer_rtx;
849 /* Tell reload not to eliminate the frame pointer. */
850 crtl->accesses_prior_frames = 1;
854 if (count > 0)
855 SETUP_FRAME_ADDRESSES ();
857 /* On the SPARC, the return address is not in the frame, it is in a
858 register. There is no way to access it off of the current frame
859 pointer, but it can be accessed off the previous frame pointer by
860 reading the value from the register window save area. */
861 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
862 count--;
864 /* Scan back COUNT frames to the specified frame. */
865 for (i = 0; i < count; i++)
867 /* Assume the dynamic chain pointer is in the word that the
868 frame address points to, unless otherwise specified. */
869 tem = DYNAMIC_CHAIN_ADDRESS (tem);
870 tem = memory_address (Pmode, tem);
871 tem = gen_frame_mem (Pmode, tem);
872 tem = copy_to_reg (tem);
875 /* For __builtin_frame_address, return what we've got. But, on
876 the SPARC for example, we may have to add a bias. */
877 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
878 return FRAME_ADDR_RTX (tem);
880 /* For __builtin_return_address, get the return address from that frame. */
881 #ifdef RETURN_ADDR_RTX
882 tem = RETURN_ADDR_RTX (count, tem);
883 #else
884 tem = memory_address (Pmode,
885 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
886 tem = gen_frame_mem (Pmode, tem);
887 #endif
888 return tem;
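/* For illustration (a sketch, not compiled): the user-level builtins
   expanded through this helper look like the following.  A nonzero COUNT
   walks the dynamic chain, which is why the code above then switches to
   the hard frame pointer and disables frame-pointer elimination.  */
#if 0
void *ra = __builtin_return_address (0);  /* return address of the current
					      function, i.e. a location in
					      its caller */
void *fp = __builtin_frame_address (0);   /* the current function's frame */
#endif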
891 /* Alias set used for setjmp buffer. */
892 static alias_set_type setjmp_alias_set = -1;
894 /* Construct the leading half of a __builtin_setjmp call. Control will
895 return to RECEIVER_LABEL. This is also called directly by the SJLJ
896 exception handling code. */
898 void
899 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
901 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
902 rtx stack_save;
903 rtx mem;
905 if (setjmp_alias_set == -1)
906 setjmp_alias_set = new_alias_set ();
908 buf_addr = convert_memory_address (Pmode, buf_addr);
910 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
912 /* We store the frame pointer and the address of receiver_label in
913 the buffer and use the rest of it for the stack save area, which
914 is machine-dependent. */
916 mem = gen_rtx_MEM (Pmode, buf_addr);
917 set_mem_alias_set (mem, setjmp_alias_set);
918 emit_move_insn (mem, hard_frame_pointer_rtx);
920 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
921 GET_MODE_SIZE (Pmode))),
922 set_mem_alias_set (mem, setjmp_alias_set);
924 emit_move_insn (validize_mem (mem),
925 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
927 stack_save = gen_rtx_MEM (sa_mode,
928 plus_constant (Pmode, buf_addr,
929 2 * GET_MODE_SIZE (Pmode)));
930 set_mem_alias_set (stack_save, setjmp_alias_set);
931 emit_stack_save (SAVE_NONLOCAL, &stack_save);
933 /* If there is further processing to do, do it. */
934 if (targetm.have_builtin_setjmp_setup ())
935 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
937 /* We have a nonlocal label. */
938 cfun->has_nonlocal_label = 1;
941 /* Construct the trailing part of a __builtin_setjmp call. This is
942 also called directly by the SJLJ exception handling code.
 943 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
945 void
946 expand_builtin_setjmp_receiver (rtx receiver_label)
948 rtx chain;
950 /* Mark the FP as used when we get here, so we have to make sure it's
951 marked as used by this function. */
952 emit_use (hard_frame_pointer_rtx);
954 /* Mark the static chain as clobbered here so life information
955 doesn't get messed up for it. */
956 chain = rtx_for_static_chain (current_function_decl, true);
957 if (chain && REG_P (chain))
958 emit_clobber (chain);
960 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
962 /* If the argument pointer can be eliminated in favor of the
963 frame pointer, we don't need to restore it. We assume here
964 that if such an elimination is present, it can always be used.
965 This is the case on all known machines; if we don't make this
966 assumption, we do unnecessary saving on many machines. */
967 size_t i;
968 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
970 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
971 if (elim_regs[i].from == ARG_POINTER_REGNUM
972 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
973 break;
975 if (i == ARRAY_SIZE (elim_regs))
977 /* Now restore our arg pointer from the address at which it
978 was saved in our stack frame. */
979 emit_move_insn (crtl->args.internal_arg_pointer,
980 copy_to_reg (get_arg_pointer_save_area ()));
984 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
985 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
986 else if (targetm.have_nonlocal_goto_receiver ())
987 emit_insn (targetm.gen_nonlocal_goto_receiver ());
988 else
989 { /* Nothing */ }
991 /* We must not allow the code we just generated to be reordered by
992 scheduling. Specifically, the update of the frame pointer must
993 happen immediately, not later. */
994 emit_insn (gen_blockage ());
997 /* __builtin_longjmp is passed a pointer to an array of five words (not
998 all will be used on all machines). It operates similarly to the C
999 library function of the same name, but is more efficient. Much of
1000 the code below is copied from the handling of non-local gotos. */
1002 static void
1003 expand_builtin_longjmp (rtx buf_addr, rtx value)
1005 rtx fp, lab, stack;
1006 rtx_insn *insn, *last;
1007 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1009 /* DRAP is needed for stack realign if longjmp is expanded to current
1010 function */
1011 if (SUPPORTS_STACK_ALIGNMENT)
1012 crtl->need_drap = true;
1014 if (setjmp_alias_set == -1)
1015 setjmp_alias_set = new_alias_set ();
1017 buf_addr = convert_memory_address (Pmode, buf_addr);
1019 buf_addr = force_reg (Pmode, buf_addr);
 1021 /* We require the user to pass a second argument of 1, because that is
 1022 what builtin_setjmp will return. */
1023 gcc_assert (value == const1_rtx);
1025 last = get_last_insn ();
1026 if (targetm.have_builtin_longjmp ())
1027 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1028 else
1030 fp = gen_rtx_MEM (Pmode, buf_addr);
1031 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1032 GET_MODE_SIZE (Pmode)));
1034 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1035 2 * GET_MODE_SIZE (Pmode)));
1036 set_mem_alias_set (fp, setjmp_alias_set);
1037 set_mem_alias_set (lab, setjmp_alias_set);
1038 set_mem_alias_set (stack, setjmp_alias_set);
1040 /* Pick up FP, label, and SP from the block and jump. This code is
1041 from expand_goto in stmt.c; see there for detailed comments. */
1042 if (targetm.have_nonlocal_goto ())
1043 /* We have to pass a value to the nonlocal_goto pattern that will
1044 get copied into the static_chain pointer, but it does not matter
1045 what that value is, because builtin_setjmp does not use it. */
1046 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1047 else
1049 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1050 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1052 lab = copy_to_reg (lab);
1054 /* Restore the frame pointer and stack pointer. We must use a
1055 temporary since the setjmp buffer may be a local. */
1056 fp = copy_to_reg (fp);
1057 emit_stack_restore (SAVE_NONLOCAL, stack);
1059 /* Ensure the frame pointer move is not optimized. */
1060 emit_insn (gen_blockage ());
1061 emit_clobber (hard_frame_pointer_rtx);
1062 emit_clobber (frame_pointer_rtx);
1063 emit_move_insn (hard_frame_pointer_rtx, fp);
1065 emit_use (hard_frame_pointer_rtx);
1066 emit_use (stack_pointer_rtx);
1067 emit_indirect_jump (lab);
1071 /* Search backwards and mark the jump insn as a non-local goto.
1072 Note that this precludes the use of __builtin_longjmp to a
1073 __builtin_setjmp target in the same function. However, we've
1074 already cautioned the user that these functions are for
1075 internal exception handling use only. */
1076 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1078 gcc_assert (insn != last);
1080 if (JUMP_P (insn))
1082 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1083 break;
1085 else if (CALL_P (insn))
1086 break;
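/* For illustration (a sketch, not compiled; do_work, handle_unwind and
   unwind_to_outer are hypothetical placeholders): the builtins expanded
   above take a five-word buffer whose word 0 holds the frame pointer,
   word 1 the receiver label and the remaining words the machine-dependent
   stack save area (see expand_builtin_setjmp_setup).  As noted above, they
   are meant for internal exception-handling use, and the longjmp must not
   target a __builtin_setjmp in the same function.  */
#if 0
void *buf[5];

void
outer (void)
{
  if (__builtin_setjmp (buf) == 0)
    do_work ();		/* normal path; may eventually call unwind_to_outer */
  else
    handle_unwind ();	/* reached via __builtin_longjmp */
}

void
unwind_to_outer (void)
{
  __builtin_longjmp (buf, 1);	/* the second argument must be 1 */
}
#endif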
1090 static inline bool
1091 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1093 return (iter->i < iter->n);
1096 /* This function validates the types of a function call argument list
1097 against a specified list of tree_codes. If the last specifier is a 0,
1098 that represents an ellipsis, otherwise the last specifier must be a
1099 VOID_TYPE. */
1101 static bool
1102 validate_arglist (const_tree callexpr, ...)
1104 enum tree_code code;
1105 bool res = 0;
1106 va_list ap;
1107 const_call_expr_arg_iterator iter;
1108 const_tree arg;
1110 va_start (ap, callexpr);
1111 init_const_call_expr_arg_iterator (callexpr, &iter);
1113 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1114 tree fn = CALL_EXPR_FN (callexpr);
1115 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1117 for (unsigned argno = 1; ; ++argno)
1119 code = (enum tree_code) va_arg (ap, int);
1121 switch (code)
1123 case 0:
 1124 /* This signifies an ellipsis; any further arguments are all ok. */
1125 res = true;
1126 goto end;
1127 case VOID_TYPE:
 1128 /* This signifies an endlink; if no arguments remain, return
 1129 true, otherwise return false. */
1130 res = !more_const_call_expr_args_p (&iter);
1131 goto end;
1132 case POINTER_TYPE:
1133 /* The actual argument must be nonnull when either the whole
1134 called function has been declared nonnull, or when the formal
1135 argument corresponding to the actual argument has been. */
1136 if (argmap
1137 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1139 arg = next_const_call_expr_arg (&iter);
1140 if (!validate_arg (arg, code) || integer_zerop (arg))
1141 goto end;
1142 break;
1144 /* FALLTHRU */
1145 default:
1146 /* If no parameters remain or the parameter's code does not
1147 match the specified code, return false. Otherwise continue
1148 checking any remaining arguments. */
1149 arg = next_const_call_expr_arg (&iter);
1150 if (!validate_arg (arg, code))
1151 goto end;
1152 break;
1156 /* We need gotos here since we can only have one VA_CLOSE in a
1157 function. */
1158 end: ;
1159 va_end (ap);
1161 BITMAP_FREE (argmap);
1163 return res;
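/* For illustration: a typical caller in this file validates a memcpy-like
   call as

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
		       INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE means "no further arguments allowed",
   whereas a trailing 0 would accept any additional arguments.  As
   described above, POINTER_TYPE arguments are additionally checked for
   being non-null when attribute nonnull applies.  */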
1166 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1167 and the address of the save area. */
1169 static rtx
1170 expand_builtin_nonlocal_goto (tree exp)
1172 tree t_label, t_save_area;
1173 rtx r_label, r_save_area, r_fp, r_sp;
1174 rtx_insn *insn;
1176 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1177 return NULL_RTX;
1179 t_label = CALL_EXPR_ARG (exp, 0);
1180 t_save_area = CALL_EXPR_ARG (exp, 1);
1182 r_label = expand_normal (t_label);
1183 r_label = convert_memory_address (Pmode, r_label);
1184 r_save_area = expand_normal (t_save_area);
1185 r_save_area = convert_memory_address (Pmode, r_save_area);
1186 /* Copy the address of the save location to a register just in case it was
1187 based on the frame pointer. */
1188 r_save_area = copy_to_reg (r_save_area);
1189 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1190 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1191 plus_constant (Pmode, r_save_area,
1192 GET_MODE_SIZE (Pmode)));
1194 crtl->has_nonlocal_goto = 1;
1196 /* ??? We no longer need to pass the static chain value, afaik. */
1197 if (targetm.have_nonlocal_goto ())
1198 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1199 else
1201 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1202 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1204 r_label = copy_to_reg (r_label);
1206 /* Restore the frame pointer and stack pointer. We must use a
1207 temporary since the setjmp buffer may be a local. */
1208 r_fp = copy_to_reg (r_fp);
1209 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1211 /* Ensure the frame pointer move is not optimized. */
1212 emit_insn (gen_blockage ());
1213 emit_clobber (hard_frame_pointer_rtx);
1214 emit_clobber (frame_pointer_rtx);
1215 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1217 /* USE of hard_frame_pointer_rtx added for consistency;
1218 not clear if really needed. */
1219 emit_use (hard_frame_pointer_rtx);
1220 emit_use (stack_pointer_rtx);
1222 /* If the architecture is using a GP register, we must
1223 conservatively assume that the target function makes use of it.
1224 The prologue of functions with nonlocal gotos must therefore
1225 initialize the GP register to the appropriate value, and we
1226 must then make sure that this value is live at the point
1227 of the jump. (Note that this doesn't necessarily apply
1228 to targets with a nonlocal_goto pattern; they are free
1229 to implement it in their own way. Note also that this is
1230 a no-op if the GP register is a global invariant.) */
1231 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1232 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1233 emit_use (pic_offset_table_rtx);
1235 emit_indirect_jump (r_label);
1238 /* Search backwards to the jump insn and mark it as a
1239 non-local goto. */
1240 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1242 if (JUMP_P (insn))
1244 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1245 break;
1247 else if (CALL_P (insn))
1248 break;
1251 return const0_rtx;
1254 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1255 (not all will be used on all machines) that was passed to __builtin_setjmp.
1256 It updates the stack pointer in that block to the current value. This is
1257 also called directly by the SJLJ exception handling code. */
1259 void
1260 expand_builtin_update_setjmp_buf (rtx buf_addr)
1262 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1263 buf_addr = convert_memory_address (Pmode, buf_addr);
1264 rtx stack_save
1265 = gen_rtx_MEM (sa_mode,
1266 memory_address
1267 (sa_mode,
1268 plus_constant (Pmode, buf_addr,
1269 2 * GET_MODE_SIZE (Pmode))));
1271 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1274 /* Expand a call to __builtin_prefetch. For a target that does not support
1275 data prefetch, evaluate the memory address argument in case it has side
1276 effects. */
1278 static void
1279 expand_builtin_prefetch (tree exp)
1281 tree arg0, arg1, arg2;
1282 int nargs;
1283 rtx op0, op1, op2;
1285 if (!validate_arglist (exp, POINTER_TYPE, 0))
1286 return;
1288 arg0 = CALL_EXPR_ARG (exp, 0);
1290 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1291 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1292 locality). */
1293 nargs = call_expr_nargs (exp);
1294 if (nargs > 1)
1295 arg1 = CALL_EXPR_ARG (exp, 1);
1296 else
1297 arg1 = integer_zero_node;
1298 if (nargs > 2)
1299 arg2 = CALL_EXPR_ARG (exp, 2);
1300 else
1301 arg2 = integer_three_node;
1303 /* Argument 0 is an address. */
1304 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1306 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1307 if (TREE_CODE (arg1) != INTEGER_CST)
1309 error ("second argument to %<__builtin_prefetch%> must be a constant");
1310 arg1 = integer_zero_node;
1312 op1 = expand_normal (arg1);
1313 /* Argument 1 must be either zero or one. */
1314 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1316 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1317 " using zero");
1318 op1 = const0_rtx;
1321 /* Argument 2 (locality) must be a compile-time constant int. */
1322 if (TREE_CODE (arg2) != INTEGER_CST)
1324 error ("third argument to %<__builtin_prefetch%> must be a constant");
1325 arg2 = integer_zero_node;
1327 op2 = expand_normal (arg2);
1328 /* Argument 2 must be 0, 1, 2, or 3. */
1329 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1331 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1332 op2 = const0_rtx;
1335 if (targetm.have_prefetch ())
1337 class expand_operand ops[3];
1339 create_address_operand (&ops[0], op0);
1340 create_integer_operand (&ops[1], INTVAL (op1));
1341 create_integer_operand (&ops[2], INTVAL (op2));
1342 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1343 return;
1346 /* Don't do anything with direct references to volatile memory, but
1347 generate code to handle other side effects. */
1348 if (!MEM_P (op0) && side_effects_p (op0))
1349 emit_insn (op0);
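/* For illustration (a sketch, not compiled; A and I are placeholder
   variables): the source-level forms handled above are shown below.  The
   second and third arguments must be integer constants; a non-constant
   argument is an error, and an out-of-range constant is warned about and
   treated as zero.  On targets without a prefetch pattern only the side
   effects of the address expression are preserved.  */
#if 0
__builtin_prefetch (&a[i]);		/* rw = 0 (read), locality = 3 */
__builtin_prefetch (&a[i], 1);		/* prefetch for writing */
__builtin_prefetch (&a[i], 0, 1);	/* read, low temporal locality */
#endif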
1352 /* Get a MEM rtx for expression EXP which is the address of an operand
1353 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1354 the maximum length of the block of memory that might be accessed or
1355 NULL if unknown. */
 1357 static rtx
 1358 get_memory_rtx (tree exp, tree len)
1360 tree orig_exp = exp;
1361 rtx addr, mem;
 1363 /* When EXP is a SAVE_EXPR that has not yet been resolved, MEM_ATTRS can
 1364 still be derived from its operand; for expr->a.b only <variable>.a.b is recorded. */
1365 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1366 exp = TREE_OPERAND (exp, 0);
1368 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1369 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1371 /* Get an expression we can use to find the attributes to assign to MEM.
1372 First remove any nops. */
1373 while (CONVERT_EXPR_P (exp)
1374 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1375 exp = TREE_OPERAND (exp, 0);
1377 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1378 (as builtin stringops may alias with anything). */
1379 exp = fold_build2 (MEM_REF,
1380 build_array_type (char_type_node,
1381 build_range_type (sizetype,
1382 size_one_node, len)),
1383 exp, build_int_cst (ptr_type_node, 0));
1385 /* If the MEM_REF has no acceptable address, try to get the base object
1386 from the original address we got, and build an all-aliasing
1387 unknown-sized access to that one. */
1388 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1389 set_mem_attributes (mem, exp, 0);
1390 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1391 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1392 0))))
1394 exp = build_fold_addr_expr (exp);
1395 exp = fold_build2 (MEM_REF,
1396 build_array_type (char_type_node,
1397 build_range_type (sizetype,
1398 size_zero_node,
1399 NULL)),
1400 exp, build_int_cst (ptr_type_node, 0));
1401 set_mem_attributes (mem, exp, 0);
1403 set_mem_alias_set (mem, 0);
1404 return mem;
1407 /* Built-in functions to perform an untyped call and return. */
1409 #define apply_args_mode \
1410 (this_target_builtins->x_apply_args_mode)
1411 #define apply_result_mode \
1412 (this_target_builtins->x_apply_result_mode)
1414 /* Return the size required for the block returned by __builtin_apply_args,
1415 and initialize apply_args_mode. */
1417 static int
1418 apply_args_size (void)
1420 static int size = -1;
1421 int align;
1422 unsigned int regno;
1424 /* The values computed by this function never change. */
1425 if (size < 0)
1427 /* The first value is the incoming arg-pointer. */
1428 size = GET_MODE_SIZE (Pmode);
1430 /* The second value is the structure value address unless this is
1431 passed as an "invisible" first argument. */
1432 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1433 size += GET_MODE_SIZE (Pmode);
1435 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1436 if (FUNCTION_ARG_REGNO_P (regno))
1438 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1440 gcc_assert (mode != VOIDmode);
1442 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1443 if (size % align != 0)
1444 size = CEIL (size, align) * align;
1445 size += GET_MODE_SIZE (mode);
1446 apply_args_mode[regno] = mode;
1448 else
1450 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1453 return size;
1456 /* Return the size required for the block returned by __builtin_apply,
1457 and initialize apply_result_mode. */
1459 static int
1460 apply_result_size (void)
1462 static int size = -1;
1463 int align, regno;
1465 /* The values computed by this function never change. */
1466 if (size < 0)
1468 size = 0;
1470 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1471 if (targetm.calls.function_value_regno_p (regno))
1473 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1475 gcc_assert (mode != VOIDmode);
1477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1478 if (size % align != 0)
1479 size = CEIL (size, align) * align;
1480 size += GET_MODE_SIZE (mode);
1481 apply_result_mode[regno] = mode;
1483 else
1484 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1486 /* Allow targets that use untyped_call and untyped_return to override
1487 the size so that machine-specific information can be stored here. */
1488 #ifdef APPLY_RESULT_SIZE
1489 size = APPLY_RESULT_SIZE;
1490 #endif
1492 return size;
1495 /* Create a vector describing the result block RESULT. If SAVEP is true,
1496 the result block is used to save the values; otherwise it is used to
1497 restore the values. */
1499 static rtx
1500 result_vector (int savep, rtx result)
1502 int regno, size, align, nelts;
1503 fixed_size_mode mode;
1504 rtx reg, mem;
1505 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1507 size = nelts = 0;
1508 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1509 if ((mode = apply_result_mode[regno]) != VOIDmode)
1511 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1512 if (size % align != 0)
1513 size = CEIL (size, align) * align;
1514 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1515 mem = adjust_address (result, mode, size);
1516 savevec[nelts++] = (savep
1517 ? gen_rtx_SET (mem, reg)
1518 : gen_rtx_SET (reg, mem));
1519 size += GET_MODE_SIZE (mode);
1521 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1524 /* Save the state required to perform an untyped call with the same
1525 arguments as were passed to the current function. */
1527 static rtx
1528 expand_builtin_apply_args_1 (void)
1530 rtx registers, tem;
1531 int size, align, regno;
1532 fixed_size_mode mode;
1533 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1535 /* Create a block where the arg-pointer, structure value address,
1536 and argument registers can be saved. */
1537 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1539 /* Walk past the arg-pointer and structure value address. */
1540 size = GET_MODE_SIZE (Pmode);
1541 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1542 size += GET_MODE_SIZE (Pmode);
1544 /* Save each register used in calling a function to the block. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_args_mode[regno]) != VOIDmode)
1548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 if (size % align != 0)
1550 size = CEIL (size, align) * align;
1552 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1554 emit_move_insn (adjust_address (registers, mode, size), tem);
1555 size += GET_MODE_SIZE (mode);
1558 /* Save the arg pointer to the block. */
1559 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1560 /* We need the pointer as the caller actually passed it to us, not
 1561 as we might have pretended it was passed. Make sure it's a valid
1562 operand, as emit_move_insn isn't expected to handle a PLUS. */
 1563 if (STACK_GROWS_DOWNWARD)
 1564 tem
 1565 = force_operand (plus_constant (Pmode, tem,
1566 crtl->args.pretend_args_size),
1567 NULL_RTX);
1568 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1570 size = GET_MODE_SIZE (Pmode);
1572 /* Save the structure value address unless this is passed as an
1573 "invisible" first argument. */
1574 if (struct_incoming_value)
1575 emit_move_insn (adjust_address (registers, Pmode, size),
1576 copy_to_reg (struct_incoming_value));
1578 /* Return the address of the block. */
1579 return copy_addr_to_reg (XEXP (registers, 0));
1582 /* __builtin_apply_args returns block of memory allocated on
1583 the stack into which is stored the arg pointer, structure
1584 value address, static chain, and all the registers that might
1585 possibly be used in performing a function call. The code is
1586 moved to the start of the function so the incoming values are
1587 saved. */
1589 static rtx
1590 expand_builtin_apply_args (void)
1592 /* Don't do __builtin_apply_args more than once in a function.
1593 Save the result of the first call and reuse it. */
1594 if (apply_args_value != 0)
1595 return apply_args_value;
1597 /* When this function is called, it means that registers must be
1598 saved on entry to this function. So we migrate the
1599 call to the first insn of this function. */
1600 rtx temp;
1602 start_sequence ();
1603 temp = expand_builtin_apply_args_1 ();
1604 rtx_insn *seq = get_insns ();
1605 end_sequence ();
1607 apply_args_value = temp;
1609 /* Put the insns after the NOTE that starts the function.
1610 If this is inside a start_sequence, make the outer-level insn
1611 chain current, so the code is placed at the start of the
1612 function. If internal_arg_pointer is a non-virtual pseudo,
1613 it needs to be placed after the function that initializes
1614 that pseudo. */
1615 push_topmost_sequence ();
1616 if (REG_P (crtl->args.internal_arg_pointer)
1617 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1618 emit_insn_before (seq, parm_birth_insn);
1619 else
1620 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1621 pop_topmost_sequence ();
1622 return temp;
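/* For illustration (a sketch, not compiled; target_fn and the size 64 are
   placeholders): __builtin_apply_args, __builtin_apply and
   __builtin_return are normally used together to forward a call without
   knowing its prototype.  The block built above (arg pointer, structure
   value address and all potential argument registers) is exactly what
   __builtin_apply consumes below.  */
#if 0
void *
forward (void)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (result);
}
#endif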
1626 /* Perform an untyped call and save the state required to perform an
1627 untyped return of whatever value was returned by the given function. */
1629 static rtx
1630 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1632 int size, align, regno;
1633 fixed_size_mode mode;
1634 rtx incoming_args, result, reg, dest, src;
1635 rtx_call_insn *call_insn;
1636 rtx old_stack_level = 0;
1637 rtx call_fusage = 0;
1638 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1640 arguments = convert_memory_address (Pmode, arguments);
1642 /* Create a block where the return registers can be saved. */
1643 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1645 /* Fetch the arg pointer from the ARGUMENTS block. */
1646 incoming_args = gen_reg_rtx (Pmode);
1647 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1648 if (!STACK_GROWS_DOWNWARD)
1649 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1650 incoming_args, 0, OPTAB_LIB_WIDEN);
1652 /* Push a new argument block and copy the arguments. Do not allow
1653 the (potential) memcpy call below to interfere with our stack
1654 manipulations. */
1655 do_pending_stack_adjust ();
1656 NO_DEFER_POP;
1658 /* Save the stack with nonlocal if available. */
1659 if (targetm.have_save_stack_nonlocal ())
1660 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1661 else
1662 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1664 /* Allocate a block of memory onto the stack and copy the memory
1665 arguments to the outgoing arguments address. We can pass TRUE
1666 as the 4th argument because we just saved the stack pointer
1667 and will restore it right after the call. */
1668 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1670 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1671 may have already set current_function_calls_alloca to true.
1672 current_function_calls_alloca won't be set if argsize is zero,
1673 so we have to guarantee need_drap is true here. */
1674 if (SUPPORTS_STACK_ALIGNMENT)
1675 crtl->need_drap = true;
1677 dest = virtual_outgoing_args_rtx;
1678 if (!STACK_GROWS_DOWNWARD)
1680 if (CONST_INT_P (argsize))
1681 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1682 else
1683 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1685 dest = gen_rtx_MEM (BLKmode, dest);
1686 set_mem_align (dest, PARM_BOUNDARY);
1687 src = gen_rtx_MEM (BLKmode, incoming_args);
1688 set_mem_align (src, PARM_BOUNDARY);
1689 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1691 /* Refer to the argument block. */
1692 apply_args_size ();
1693 arguments = gen_rtx_MEM (BLKmode, arguments);
1694 set_mem_align (arguments, PARM_BOUNDARY);
1696 /* Walk past the arg-pointer and structure value address. */
1697 size = GET_MODE_SIZE (Pmode);
1698 if (struct_value)
1699 size += GET_MODE_SIZE (Pmode);
1701 /* Restore each of the registers previously saved. Make USE insns
1702 for each of these registers for use in making the call. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704 if ((mode = apply_args_mode[regno]) != VOIDmode)
1706 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1707 if (size % align != 0)
1708 size = CEIL (size, align) * align;
1709 reg = gen_rtx_REG (mode, regno);
1710 emit_move_insn (reg, adjust_address (arguments, mode, size));
1711 use_reg (&call_fusage, reg);
1712 size += GET_MODE_SIZE (mode);
1715 /* Restore the structure value address unless this is passed as an
1716 "invisible" first argument. */
1717 size = GET_MODE_SIZE (Pmode);
1718 if (struct_value)
1720 rtx value = gen_reg_rtx (Pmode);
1721 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1722 emit_move_insn (struct_value, value);
1723 if (REG_P (struct_value))
1724 use_reg (&call_fusage, struct_value);
1727 /* All arguments and registers used for the call are set up by now! */
1728 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1730 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1731 and we don't want to load it into a register as an optimization,
1732 because prepare_call_address already did it if it should be done. */
1733 if (GET_CODE (function) != SYMBOL_REF)
1734 function = memory_address (FUNCTION_MODE, function);
1736 /* Generate the actual call instruction and save the return value. */
1737 if (targetm.have_untyped_call ())
1739 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1740 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1741 result_vector (1, result));
1742 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1743 if (CALL_P (insn))
1744 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1745 emit_insn (seq);
1747 else if (targetm.have_call_value ())
1749 rtx valreg = 0;
1751 /* Locate the unique return register. It is not possible to
1752 express a call that sets more than one return register using
1753 call_value; use untyped_call for that. In fact, untyped_call
1754 only needs to save the return registers in the given block. */
1755 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1756 if ((mode = apply_result_mode[regno]) != VOIDmode)
1758 gcc_assert (!valreg); /* have_untyped_call required. */
1760 valreg = gen_rtx_REG (mode, regno);
1763 emit_insn (targetm.gen_call_value (valreg,
1764 gen_rtx_MEM (FUNCTION_MODE, function),
1765 const0_rtx, NULL_RTX, const0_rtx));
1767 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1769 else
1770 gcc_unreachable ();
1772 /* Find the CALL insn we just emitted, and attach the register usage
1773 information. */
1774 call_insn = last_call_insn ();
1775 add_function_usage_to (call_insn, call_fusage);
1777 /* Restore the stack. */
1778 if (targetm.have_save_stack_nonlocal ())
1779 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1780 else
1781 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1782 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1784 OK_DEFER_POP;
1786 /* Return the address of the result block. */
1787 result = copy_addr_to_reg (XEXP (result, 0));
1788 return convert_memory_address (ptr_mode, result);
1791 /* Perform an untyped return. */
1793 static void
1794 expand_builtin_return (rtx result)
1796 int size, align, regno;
1797 fixed_size_mode mode;
1798 rtx reg;
1799 rtx_insn *call_fusage = 0;
1801 result = convert_memory_address (Pmode, result);
1803 apply_result_size ();
1804 result = gen_rtx_MEM (BLKmode, result);
1806 if (targetm.have_untyped_return ())
1808 rtx vector = result_vector (0, result);
1809 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1810 emit_barrier ();
1811 return;
1814 /* Restore the return value and note that each value is used. */
1815 size = 0;
1816 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1817 if ((mode = apply_result_mode[regno]) != VOIDmode)
1819 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1820 if (size % align != 0)
1821 size = CEIL (size, align) * align;
1822 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1823 emit_move_insn (reg, adjust_address (result, mode, size));
1825 push_to_sequence (call_fusage);
1826 emit_use (reg);
1827 call_fusage = get_insns ();
1828 end_sequence ();
1829 size += GET_MODE_SIZE (mode);
1832 /* Put the USE insns before the return. */
1833 emit_insn (call_fusage);
1835 /* Return whatever value was restored by jumping directly to the end
1836 of the function. */
1837 expand_naked_return ();
1840 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1842 static enum type_class
1843 type_to_class (tree type)
1845 switch (TREE_CODE (type))
1847 case VOID_TYPE: return void_type_class;
1848 case INTEGER_TYPE: return integer_type_class;
1849 case ENUMERAL_TYPE: return enumeral_type_class;
1850 case BOOLEAN_TYPE: return boolean_type_class;
1851 case POINTER_TYPE: return pointer_type_class;
1852 case REFERENCE_TYPE: return reference_type_class;
1853 case OFFSET_TYPE: return offset_type_class;
1854 case REAL_TYPE: return real_type_class;
1855 case COMPLEX_TYPE: return complex_type_class;
1856 case FUNCTION_TYPE: return function_type_class;
1857 case METHOD_TYPE: return method_type_class;
1858 case RECORD_TYPE: return record_type_class;
1859 case UNION_TYPE:
1860 case QUAL_UNION_TYPE: return union_type_class;
1861 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1862 ? string_type_class : array_type_class);
1863 case LANG_TYPE: return lang_type_class;
1864 case OPAQUE_TYPE: return opaque_type_class;
1865 default: return no_type_class;
1869 /* Expand a call EXP to __builtin_classify_type. */
1871 static rtx
1872 expand_builtin_classify_type (tree exp)
1874 if (call_expr_nargs (exp))
1875 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1876 return GEN_INT (no_type_class);
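/* Illustrative only: with the enumeration from typeclass.h, a call such as

     int k = __builtin_classify_type (3.14);

   expands to the value of real_type_class, a pointer argument yields
   pointer_type_class, and a call with no argument yields no_type_class.  */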
1879 /* This helper macro, meant to be used in mathfn_built_in below, determines
1880 which among a set of builtin math functions is appropriate for a given type
1881 mode. The `F' (float) and `L' (long double) are automatically generated
1882 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1883 types, there are additional types that are considered with 'F32', 'F64',
1884 'F128', etc. suffixes. */
1885 #define CASE_MATHFN(MATHFN) \
1886 CASE_CFN_##MATHFN: \
1887 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1888 fcodel = BUILT_IN_##MATHFN##L ; break;
1889 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1890 types. */
1891 #define CASE_MATHFN_FLOATN(MATHFN) \
1892 CASE_CFN_##MATHFN: \
1893 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1894 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1895 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1896 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1897 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1898 break;
1899 /* Similar to above, but appends _R after any F/L suffix. */
1900 #define CASE_MATHFN_REENT(MATHFN) \
1901 case CFN_BUILT_IN_##MATHFN##_R: \
1902 case CFN_BUILT_IN_##MATHFN##F_R: \
1903 case CFN_BUILT_IN_##MATHFN##L_R: \
1904 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1905 fcodel = BUILT_IN_##MATHFN##L_R ; break;
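/* For example (illustrative expansion), CASE_MATHFN (SIN) above expands to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each invocation maps one combined_fn case onto the per-type
   built-in function codes used by mathfn_built_in_2 below.  */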
1907 /* Return a function equivalent to FN but operating on floating-point
1908 values of type TYPE, or END_BUILTINS if no such function exists.
1909 This is purely an operation on function codes; it does not guarantee
1910 that the target actually has an implementation of the function. */
1912 static built_in_function
1913 mathfn_built_in_2 (tree type, combined_fn fn)
1915 tree mtype;
1916 built_in_function fcode, fcodef, fcodel;
1917 built_in_function fcodef16 = END_BUILTINS;
1918 built_in_function fcodef32 = END_BUILTINS;
1919 built_in_function fcodef64 = END_BUILTINS;
1920 built_in_function fcodef128 = END_BUILTINS;
1921 built_in_function fcodef32x = END_BUILTINS;
1922 built_in_function fcodef64x = END_BUILTINS;
1923 built_in_function fcodef128x = END_BUILTINS;
1925 switch (fn)
1927 #define SEQ_OF_CASE_MATHFN \
1928 CASE_MATHFN (ACOS) \
1929 CASE_MATHFN (ACOSH) \
1930 CASE_MATHFN (ASIN) \
1931 CASE_MATHFN (ASINH) \
1932 CASE_MATHFN (ATAN) \
1933 CASE_MATHFN (ATAN2) \
1934 CASE_MATHFN (ATANH) \
1935 CASE_MATHFN (CBRT) \
1936 CASE_MATHFN_FLOATN (CEIL) \
1937 CASE_MATHFN (CEXPI) \
1938 CASE_MATHFN_FLOATN (COPYSIGN) \
1939 CASE_MATHFN (COS) \
1940 CASE_MATHFN (COSH) \
1941 CASE_MATHFN (DREM) \
1942 CASE_MATHFN (ERF) \
1943 CASE_MATHFN (ERFC) \
1944 CASE_MATHFN (EXP) \
1945 CASE_MATHFN (EXP10) \
1946 CASE_MATHFN (EXP2) \
1947 CASE_MATHFN (EXPM1) \
1948 CASE_MATHFN (FABS) \
1949 CASE_MATHFN (FDIM) \
1950 CASE_MATHFN_FLOATN (FLOOR) \
1951 CASE_MATHFN_FLOATN (FMA) \
1952 CASE_MATHFN_FLOATN (FMAX) \
1953 CASE_MATHFN_FLOATN (FMIN) \
1954 CASE_MATHFN (FMOD) \
1955 CASE_MATHFN (FREXP) \
1956 CASE_MATHFN (GAMMA) \
1957 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1958 CASE_MATHFN (HUGE_VAL) \
1959 CASE_MATHFN (HYPOT) \
1960 CASE_MATHFN (ILOGB) \
1961 CASE_MATHFN (ICEIL) \
1962 CASE_MATHFN (IFLOOR) \
1963 CASE_MATHFN (INF) \
1964 CASE_MATHFN (IRINT) \
1965 CASE_MATHFN (IROUND) \
1966 CASE_MATHFN (ISINF) \
1967 CASE_MATHFN (J0) \
1968 CASE_MATHFN (J1) \
1969 CASE_MATHFN (JN) \
1970 CASE_MATHFN (LCEIL) \
1971 CASE_MATHFN (LDEXP) \
1972 CASE_MATHFN (LFLOOR) \
1973 CASE_MATHFN (LGAMMA) \
1974 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1975 CASE_MATHFN (LLCEIL) \
1976 CASE_MATHFN (LLFLOOR) \
1977 CASE_MATHFN (LLRINT) \
1978 CASE_MATHFN (LLROUND) \
1979 CASE_MATHFN (LOG) \
1980 CASE_MATHFN (LOG10) \
1981 CASE_MATHFN (LOG1P) \
1982 CASE_MATHFN (LOG2) \
1983 CASE_MATHFN (LOGB) \
1984 CASE_MATHFN (LRINT) \
1985 CASE_MATHFN (LROUND) \
1986 CASE_MATHFN (MODF) \
1987 CASE_MATHFN (NAN) \
1988 CASE_MATHFN (NANS) \
1989 CASE_MATHFN_FLOATN (NEARBYINT) \
1990 CASE_MATHFN (NEXTAFTER) \
1991 CASE_MATHFN (NEXTTOWARD) \
1992 CASE_MATHFN (POW) \
1993 CASE_MATHFN (POWI) \
1994 CASE_MATHFN (POW10) \
1995 CASE_MATHFN (REMAINDER) \
1996 CASE_MATHFN (REMQUO) \
1997 CASE_MATHFN_FLOATN (RINT) \
1998 CASE_MATHFN_FLOATN (ROUND) \
1999 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2000 CASE_MATHFN (SCALB) \
2001 CASE_MATHFN (SCALBLN) \
2002 CASE_MATHFN (SCALBN) \
2003 CASE_MATHFN (SIGNBIT) \
2004 CASE_MATHFN (SIGNIFICAND) \
2005 CASE_MATHFN (SIN) \
2006 CASE_MATHFN (SINCOS) \
2007 CASE_MATHFN (SINH) \
2008 CASE_MATHFN_FLOATN (SQRT) \
2009 CASE_MATHFN (TAN) \
2010 CASE_MATHFN (TANH) \
2011 CASE_MATHFN (TGAMMA) \
2012 CASE_MATHFN_FLOATN (TRUNC) \
2013 CASE_MATHFN (Y0) \
2014 CASE_MATHFN (Y1) \
2015 CASE_MATHFN (YN)
2017 SEQ_OF_CASE_MATHFN
2019 default:
2020 return END_BUILTINS;
2023 mtype = TYPE_MAIN_VARIANT (type);
2024 if (mtype == double_type_node)
2025 return fcode;
2026 else if (mtype == float_type_node)
2027 return fcodef;
2028 else if (mtype == long_double_type_node)
2029 return fcodel;
2030 else if (mtype == float16_type_node)
2031 return fcodef16;
2032 else if (mtype == float32_type_node)
2033 return fcodef32;
2034 else if (mtype == float64_type_node)
2035 return fcodef64;
2036 else if (mtype == float128_type_node)
2037 return fcodef128;
2038 else if (mtype == float32x_type_node)
2039 return fcodef32x;
2040 else if (mtype == float64x_type_node)
2041 return fcodef64x;
2042 else if (mtype == float128x_type_node)
2043 return fcodef128x;
2044 else
2045 return END_BUILTINS;
2048 #undef CASE_MATHFN
2049 #undef CASE_MATHFN_FLOATN
2050 #undef CASE_MATHFN_REENT
2052 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2053 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2054 otherwise use the explicit declaration. If we can't do the conversion,
2055 return null. */
2057 static tree
2058 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2060 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2061 if (fcode2 == END_BUILTINS)
2062 return NULL_TREE;
2064 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2065 return NULL_TREE;
2067 return builtin_decl_explicit (fcode2);
2070 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2072 tree
2073 mathfn_built_in (tree type, combined_fn fn)
2075 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2078 /* Like mathfn_built_in_1, but take a built_in_function and
2079 always use the implicit builtin declarations. */
2081 tree
2082 mathfn_built_in (tree type, enum built_in_function fn)
2084 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
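/* A usage sketch (illustrative only): given the type of an argument,
   callers ask for the matching variant of a math builtin, e.g.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the declaration of sqrtf when the implicit declaration
   is available, or NULL_TREE otherwise.  */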
2087 /* Return the type associated with a built in function, i.e., the one
2088 to be passed to mathfn_built_in to get the type-specific
2089 function. */
2091 tree
2092 mathfn_built_in_type (combined_fn fn)
2094 #define CASE_MATHFN(MATHFN) \
2095 case CFN_BUILT_IN_##MATHFN: \
2096 return double_type_node; \
2097 case CFN_BUILT_IN_##MATHFN##F: \
2098 return float_type_node; \
2099 case CFN_BUILT_IN_##MATHFN##L: \
2100 return long_double_type_node;
2102 #define CASE_MATHFN_FLOATN(MATHFN) \
2103 CASE_MATHFN(MATHFN) \
2104 case CFN_BUILT_IN_##MATHFN##F16: \
2105 return float16_type_node; \
2106 case CFN_BUILT_IN_##MATHFN##F32: \
2107 return float32_type_node; \
2108 case CFN_BUILT_IN_##MATHFN##F64: \
2109 return float64_type_node; \
2110 case CFN_BUILT_IN_##MATHFN##F128: \
2111 return float128_type_node; \
2112 case CFN_BUILT_IN_##MATHFN##F32X: \
2113 return float32x_type_node; \
2114 case CFN_BUILT_IN_##MATHFN##F64X: \
2115 return float64x_type_node; \
2116 case CFN_BUILT_IN_##MATHFN##F128X: \
2117 return float128x_type_node;
2119 /* Similar to above, but appends _R after any F/L suffix. */
2120 #define CASE_MATHFN_REENT(MATHFN) \
2121 case CFN_BUILT_IN_##MATHFN##_R: \
2122 return double_type_node; \
2123 case CFN_BUILT_IN_##MATHFN##F_R: \
2124 return float_type_node; \
2125 case CFN_BUILT_IN_##MATHFN##L_R: \
2126 return long_double_type_node;
2128 switch (fn)
2130 SEQ_OF_CASE_MATHFN
2132 default:
2133 return NULL_TREE;
2136 #undef CASE_MATHFN
2137 #undef CASE_MATHFN_FLOATN
2138 #undef CASE_MATHFN_REENT
2139 #undef SEQ_OF_CASE_MATHFN
2142 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2143 return its code, otherwise return IFN_LAST. Note that this function
2144 only tests whether the function is defined in internal-fn.def, not whether
2145 it is actually available on the target. */
2147 internal_fn
2148 associated_internal_fn (tree fndecl)
2150 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2151 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2152 switch (DECL_FUNCTION_CODE (fndecl))
2154 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2155 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2156 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2157 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2158 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2159 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #include "internal-fn.def"
2163 CASE_FLT_FN (BUILT_IN_POW10):
2164 return IFN_EXP10;
2166 CASE_FLT_FN (BUILT_IN_DREM):
2167 return IFN_REMAINDER;
2169 CASE_FLT_FN (BUILT_IN_SCALBN):
2170 CASE_FLT_FN (BUILT_IN_SCALBLN):
2171 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2172 return IFN_LDEXP;
2173 return IFN_LAST;
2175 default:
2176 return IFN_LAST;
2180 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2181 on the current target by a call to an internal function, return the
2182 code of that internal function, otherwise return IFN_LAST. The caller
2183 is responsible for ensuring that any side-effects of the built-in
2184 call are dealt with correctly. E.g. if CALL sets errno, the caller
2185 must decide that the errno result isn't needed or make it available
2186 in some other way. */
2188 internal_fn
2189 replacement_internal_fn (gcall *call)
2191 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2193 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2194 if (ifn != IFN_LAST)
2196 tree_pair types = direct_internal_fn_types (ifn, call);
2197 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2198 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2199 return ifn;
2202 return IFN_LAST;
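/* Illustrative only: for a statement such as

     x = __builtin_sqrt (y);

   replacement_internal_fn returns IFN_SQRT when the target supports the
   corresponding optab for the relevant mode, so the caller may rewrite
   the call as the internal function; since sqrt may set errno, the
   caller must first decide that the errno result is not needed
   (e.g. under -fno-math-errno).  */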
2205 /* Expand a call to the builtin ternary math functions (fma).
2206 Return NULL_RTX if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's
2210 operands. */
2212 static rtx
2213 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2215 optab builtin_optab;
2216 rtx op0, op1, op2, result;
2217 rtx_insn *insns;
2218 tree fndecl = get_callee_fndecl (exp);
2219 tree arg0, arg1, arg2;
2220 machine_mode mode;
2222 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2223 return NULL_RTX;
2225 arg0 = CALL_EXPR_ARG (exp, 0);
2226 arg1 = CALL_EXPR_ARG (exp, 1);
2227 arg2 = CALL_EXPR_ARG (exp, 2);
2229 switch (DECL_FUNCTION_CODE (fndecl))
2231 CASE_FLT_FN (BUILT_IN_FMA):
2232 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2233 builtin_optab = fma_optab; break;
2234 default:
2235 gcc_unreachable ();
2238 /* Make a suitable register to place result in. */
2239 mode = TYPE_MODE (TREE_TYPE (exp));
2241 /* Before working hard, check whether the instruction is available. */
2242 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2243 return NULL_RTX;
2245 result = gen_reg_rtx (mode);
2247 /* Always stabilize the argument list. */
2248 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2249 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2250 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2252 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2253 op1 = expand_normal (arg1);
2254 op2 = expand_normal (arg2);
2256 start_sequence ();
2258 /* Compute into RESULT.
2259 Set RESULT to wherever the result comes back. */
2260 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2261 result, 0);
2263 /* If we were unable to expand via the builtin, stop the sequence
2264 (without outputting the insns) and call the library function
2265 with the stabilized argument list. */
2266 if (result == 0)
2268 end_sequence ();
2269 return expand_call (exp, target, target == const0_rtx);
2272 /* Output the entire sequence. */
2273 insns = get_insns ();
2274 end_sequence ();
2275 emit_insn (insns);
2277 return result;
2280 /* Expand a call to the builtin sin and cos math functions.
2281 Return NULL_RTX if a normal call should be emitted rather than expanding the
2282 function in-line. EXP is the expression that is a call to the builtin
2283 function; if convenient, the result should be placed in TARGET.
2284 SUBTARGET may be used as the target for computing one of EXP's
2285 operands. */
2287 static rtx
2288 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2290 optab builtin_optab;
2291 rtx op0;
2292 rtx_insn *insns;
2293 tree fndecl = get_callee_fndecl (exp);
2294 machine_mode mode;
2295 tree arg;
2297 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2298 return NULL_RTX;
2300 arg = CALL_EXPR_ARG (exp, 0);
2302 switch (DECL_FUNCTION_CODE (fndecl))
2304 CASE_FLT_FN (BUILT_IN_SIN):
2305 CASE_FLT_FN (BUILT_IN_COS):
2306 builtin_optab = sincos_optab; break;
2307 default:
2308 gcc_unreachable ();
2311 /* Make a suitable register to place result in. */
2312 mode = TYPE_MODE (TREE_TYPE (exp));
2314 /* Check if the sincos insn is available; otherwise fall back
2315 to the sin or cos insn. */
2316 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2317 switch (DECL_FUNCTION_CODE (fndecl))
2319 CASE_FLT_FN (BUILT_IN_SIN):
2320 builtin_optab = sin_optab; break;
2321 CASE_FLT_FN (BUILT_IN_COS):
2322 builtin_optab = cos_optab; break;
2323 default:
2324 gcc_unreachable ();
2327 /* Before working hard, check whether the instruction is available. */
2328 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2330 rtx result = gen_reg_rtx (mode);
2332 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2333 need to expand the argument again. This way, we will not perform
2334 side-effects more than once. */
2335 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2337 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2339 start_sequence ();
2341 /* Compute into RESULT.
2342 Set RESULT to wherever the result comes back. */
2343 if (builtin_optab == sincos_optab)
2345 int ok;
2347 switch (DECL_FUNCTION_CODE (fndecl))
2349 CASE_FLT_FN (BUILT_IN_SIN):
2350 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2351 break;
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2354 break;
2355 default:
2356 gcc_unreachable ();
2358 gcc_assert (ok);
2360 else
2361 result = expand_unop (mode, builtin_optab, op0, result, 0);
2363 if (result != 0)
2365 /* Output the entire sequence. */
2366 insns = get_insns ();
2367 end_sequence ();
2368 emit_insn (insns);
2369 return result;
2372 /* If we were unable to expand via the builtin, stop the sequence
2373 (without outputting the insns) and call the library function
2374 with the stabilized argument list. */
2375 end_sequence ();
2378 return expand_call (exp, target, target == const0_rtx);
2381 /* Given an interclass math builtin decl FNDECL and its argument ARG
2382 return an RTL instruction code that implements the functionality.
2383 If that isn't possible or available return CODE_FOR_nothing. */
2385 static enum insn_code
2386 interclass_mathfn_icode (tree arg, tree fndecl)
2388 bool errno_set = false;
2389 optab builtin_optab = unknown_optab;
2390 machine_mode mode;
2392 switch (DECL_FUNCTION_CODE (fndecl))
2394 CASE_FLT_FN (BUILT_IN_ILOGB):
2395 errno_set = true; builtin_optab = ilogb_optab; break;
2396 CASE_FLT_FN (BUILT_IN_ISINF):
2397 builtin_optab = isinf_optab; break;
2398 case BUILT_IN_ISNORMAL:
2399 case BUILT_IN_ISFINITE:
2400 CASE_FLT_FN (BUILT_IN_FINITE):
2401 case BUILT_IN_FINITED32:
2402 case BUILT_IN_FINITED64:
2403 case BUILT_IN_FINITED128:
2404 case BUILT_IN_ISINFD32:
2405 case BUILT_IN_ISINFD64:
2406 case BUILT_IN_ISINFD128:
2407 /* These builtins have no optabs (yet). */
2408 break;
2409 default:
2410 gcc_unreachable ();
2413 /* There's no easy way to detect the case we need to set EDOM. */
2414 if (flag_errno_math && errno_set)
2415 return CODE_FOR_nothing;
2417 /* Optab mode depends on the mode of the input argument. */
2418 mode = TYPE_MODE (TREE_TYPE (arg));
2420 if (builtin_optab)
2421 return optab_handler (builtin_optab, mode);
2422 return CODE_FOR_nothing;
2425 /* Expand a call to one of the builtin math functions that operate on
2426 a floating-point argument and output an integer result (ilogb, isinf,
2427 isnan, etc).
2428 Return 0 if a normal call should be emitted rather than expanding the
2429 function in-line. EXP is the expression that is a call to the builtin
2430 function; if convenient, the result should be placed in TARGET. */
2432 static rtx
2433 expand_builtin_interclass_mathfn (tree exp, rtx target)
2435 enum insn_code icode = CODE_FOR_nothing;
2436 rtx op0;
2437 tree fndecl = get_callee_fndecl (exp);
2438 machine_mode mode;
2439 tree arg;
2441 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2442 return NULL_RTX;
2444 arg = CALL_EXPR_ARG (exp, 0);
2445 icode = interclass_mathfn_icode (arg, fndecl);
2446 mode = TYPE_MODE (TREE_TYPE (arg));
2448 if (icode != CODE_FOR_nothing)
2450 class expand_operand ops[1];
2451 rtx_insn *last = get_last_insn ();
2452 tree orig_arg = arg;
2454 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2455 need to expand the argument again. This way, we will not perform
2456 side-effects more than once. */
2457 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2459 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2461 if (mode != GET_MODE (op0))
2462 op0 = convert_to_mode (mode, op0, 0);
2464 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2465 if (maybe_legitimize_operands (icode, 0, 1, ops)
2466 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2467 return ops[0].value;
2469 delete_insns_since (last);
2470 CALL_EXPR_ARG (exp, 0) = orig_arg;
2473 return NULL_RTX;
2476 /* Expand a call to the builtin sincos math function.
2477 Return NULL_RTX if a normal call should be emitted rather than expanding the
2478 function in-line. EXP is the expression that is a call to the builtin
2479 function. */
2481 static rtx
2482 expand_builtin_sincos (tree exp)
2484 rtx op0, op1, op2, target1, target2;
2485 machine_mode mode;
2486 tree arg, sinp, cosp;
2487 int result;
2488 location_t loc = EXPR_LOCATION (exp);
2489 tree alias_type, alias_off;
2491 if (!validate_arglist (exp, REAL_TYPE,
2492 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2493 return NULL_RTX;
2495 arg = CALL_EXPR_ARG (exp, 0);
2496 sinp = CALL_EXPR_ARG (exp, 1);
2497 cosp = CALL_EXPR_ARG (exp, 2);
2499 /* Make a suitable register to place result in. */
2500 mode = TYPE_MODE (TREE_TYPE (arg));
2502 /* Check if sincos insn is available, otherwise emit the call. */
2503 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2504 return NULL_RTX;
2506 target1 = gen_reg_rtx (mode);
2507 target2 = gen_reg_rtx (mode);
2509 op0 = expand_normal (arg);
2510 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2511 alias_off = build_int_cst (alias_type, 0);
2512 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2513 sinp, alias_off));
2514 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2515 cosp, alias_off));
2517 /* Compute into target1 and target2.
2518 Set TARGET to wherever the result comes back. */
2519 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2520 gcc_assert (result);
2522 /* Move target1 and target2 to the memory locations indicated
2523 by op1 and op2. */
2524 emit_move_insn (op1, target1);
2525 emit_move_insn (op2, target2);
2527 return const0_rtx;
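/* Illustrative only (assuming the target provides the sincos optab):
   a source-level call

     sincos (x, &s, &c);

   is expanded here into a single two-output insn that stores sin (x)
   and cos (x) through the two pointers, instead of two separate
   libcalls.  */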
2530 /* Expand a call to the internal cexpi builtin to the sincos math function.
2531 EXP is the expression that is a call to the builtin function; if convenient,
2532 the result should be placed in TARGET. */
2534 static rtx
2535 expand_builtin_cexpi (tree exp, rtx target)
2537 tree fndecl = get_callee_fndecl (exp);
2538 tree arg, type;
2539 machine_mode mode;
2540 rtx op0, op1, op2;
2541 location_t loc = EXPR_LOCATION (exp);
2543 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2544 return NULL_RTX;
2546 arg = CALL_EXPR_ARG (exp, 0);
2547 type = TREE_TYPE (arg);
2548 mode = TYPE_MODE (TREE_TYPE (arg));
2550 /* Try expanding via a sincos optab, fall back to emitting a libcall
2551 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2552 is only generated from sincos or cexp, or when either of them is known to be available. */
2553 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2555 op1 = gen_reg_rtx (mode);
2556 op2 = gen_reg_rtx (mode);
2558 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2560 /* Compute into op1 and op2. */
2561 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2563 else if (targetm.libc_has_function (function_sincos, type))
2565 tree call, fn = NULL_TREE;
2566 tree top1, top2;
2567 rtx op1a, op2a;
2569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2570 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2571 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2572 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2573 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2574 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2575 else
2576 gcc_unreachable ();
2578 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2579 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2580 op1a = copy_addr_to_reg (XEXP (op1, 0));
2581 op2a = copy_addr_to_reg (XEXP (op2, 0));
2582 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2583 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2585 /* Make sure not to fold the sincos call again. */
2586 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2587 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2588 call, 3, arg, top1, top2));
2590 else
2592 tree call, fn = NULL_TREE, narg;
2593 tree ctype = build_complex_type (type);
2595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2596 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2597 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2598 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2599 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2600 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2601 else
2602 gcc_unreachable ();
2604 /* If we don't have a decl for cexp create one. This is the
2605 friendliest fallback if the user calls __builtin_cexpi
2606 without full target C99 function support. */
2607 if (fn == NULL_TREE)
2609 tree fntype;
2610 const char *name = NULL;
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 name = "cexpf";
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 name = "cexp";
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 name = "cexpl";
2619 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2620 fn = build_fn_decl (name, fntype);
2623 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2624 build_real (type, dconst0), arg);
2626 /* Make sure not to fold the cexp call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2628 return expand_expr (build_call_nary (ctype, call, 1, narg),
2629 target, VOIDmode, EXPAND_NORMAL);
2632 /* Now build the proper return type. */
2633 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2634 make_tree (TREE_TYPE (arg), op2),
2635 make_tree (TREE_TYPE (arg), op1)),
2636 target, VOIDmode, EXPAND_NORMAL);
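/* For reference, the identity behind the strategies above:
   cexpi (x) == cexp (I*x) == cos (x) + I*sin (x).  So when no sincos
   optab exists, the call is lowered either to a sincos () libcall whose
   two results are recombined into a complex value, or to a cexp () call
   on the purely imaginary argument built by the COMPLEX_EXPR just
   above.  */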
2639 /* Conveniently construct a function call expression. FNDECL names the
2640 function to be called, N is the number of arguments, and the "..."
2641 parameters are the argument expressions. Unlike build_call_expr
2642 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2644 static tree
2645 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2647 va_list ap;
2648 tree fntype = TREE_TYPE (fndecl);
2649 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2651 va_start (ap, n);
2652 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2653 va_end (ap);
2654 SET_EXPR_LOCATION (fn, loc);
2655 return fn;
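/* A usage sketch (illustrative only): the expanders below use this
   helper to build explicit libc calls that must not be re-folded, e.g.

     tree call = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   which yields a plain CALL_EXPR to the one-argument fallback function,
   ready to be passed to expand_call or expand_normal.  */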
2658 /* Expand a call to one of the builtin rounding functions gcc defines
2659 as an extension (lfloor and lceil). As these are gcc extensions we
2660 do not need to worry about setting errno to EDOM.
2661 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2662 EXP is the expression that is a call to the builtin function;
2663 if convenient, the result should be placed in TARGET. */
2665 static rtx
2666 expand_builtin_int_roundingfn (tree exp, rtx target)
2668 convert_optab builtin_optab;
2669 rtx op0, tmp;
2670 rtx_insn *insns;
2671 tree fndecl = get_callee_fndecl (exp);
2672 enum built_in_function fallback_fn;
2673 tree fallback_fndecl;
2674 machine_mode mode;
2675 tree arg;
2677 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2678 return NULL_RTX;
2680 arg = CALL_EXPR_ARG (exp, 0);
2682 switch (DECL_FUNCTION_CODE (fndecl))
2684 CASE_FLT_FN (BUILT_IN_ICEIL):
2685 CASE_FLT_FN (BUILT_IN_LCEIL):
2686 CASE_FLT_FN (BUILT_IN_LLCEIL):
2687 builtin_optab = lceil_optab;
2688 fallback_fn = BUILT_IN_CEIL;
2689 break;
2691 CASE_FLT_FN (BUILT_IN_IFLOOR):
2692 CASE_FLT_FN (BUILT_IN_LFLOOR):
2693 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2694 builtin_optab = lfloor_optab;
2695 fallback_fn = BUILT_IN_FLOOR;
2696 break;
2698 default:
2699 gcc_unreachable ();
2702 /* Make a suitable register to place result in. */
2703 mode = TYPE_MODE (TREE_TYPE (exp));
2705 target = gen_reg_rtx (mode);
2707 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2708 need to expand the argument again. This way, we will not perform
2709 side-effects more than once. */
2710 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2712 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2714 start_sequence ();
2716 /* Compute into TARGET. */
2717 if (expand_sfix_optab (target, op0, builtin_optab))
2719 /* Output the entire sequence. */
2720 insns = get_insns ();
2721 end_sequence ();
2722 emit_insn (insns);
2723 return target;
2726 /* If we were unable to expand via the builtin, stop the sequence
2727 (without outputting the insns). */
2728 end_sequence ();
2730 /* Fall back to floating point rounding optab. */
2731 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2733 /* For non-C99 targets we may end up without a fallback fndecl here
2734 if the user called __builtin_lfloor directly. In this case emit
2735 a call to the floor/ceil variants nevertheless. This should result
2736 in the best user experience for targets lacking full C99 support. */
2737 if (fallback_fndecl == NULL_TREE)
2739 tree fntype;
2740 const char *name = NULL;
2742 switch (DECL_FUNCTION_CODE (fndecl))
2744 case BUILT_IN_ICEIL:
2745 case BUILT_IN_LCEIL:
2746 case BUILT_IN_LLCEIL:
2747 name = "ceil";
2748 break;
2749 case BUILT_IN_ICEILF:
2750 case BUILT_IN_LCEILF:
2751 case BUILT_IN_LLCEILF:
2752 name = "ceilf";
2753 break;
2754 case BUILT_IN_ICEILL:
2755 case BUILT_IN_LCEILL:
2756 case BUILT_IN_LLCEILL:
2757 name = "ceill";
2758 break;
2759 case BUILT_IN_IFLOOR:
2760 case BUILT_IN_LFLOOR:
2761 case BUILT_IN_LLFLOOR:
2762 name = "floor";
2763 break;
2764 case BUILT_IN_IFLOORF:
2765 case BUILT_IN_LFLOORF:
2766 case BUILT_IN_LLFLOORF:
2767 name = "floorf";
2768 break;
2769 case BUILT_IN_IFLOORL:
2770 case BUILT_IN_LFLOORL:
2771 case BUILT_IN_LLFLOORL:
2772 name = "floorl";
2773 break;
2774 default:
2775 gcc_unreachable ();
2778 fntype = build_function_type_list (TREE_TYPE (arg),
2779 TREE_TYPE (arg), NULL_TREE);
2780 fallback_fndecl = build_fn_decl (name, fntype);
2783 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2785 tmp = expand_normal (exp);
2786 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2788 /* Truncate the result of floating point optab to integer
2789 via expand_fix (). */
2790 target = gen_reg_rtx (mode);
2791 expand_fix (target, tmp, 0);
2793 return target;
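/* An illustrative lowering (assuming the target lacks an lceil optab):

     long n = __builtin_lceil (x);

   follows the fallback path above and becomes roughly ceil (x) followed
   by an expand_fix () conversion of the result, i.e. (long) ceil (x),
   so the GCC extension still works on targets that only provide the
   plain C99 rounding functions.  */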
2796 /* Expand a call to one of the builtin math functions doing integer
2797 conversion (lrint).
2798 Return 0 if a normal call should be emitted rather than expanding the
2799 function in-line. EXP is the expression that is a call to the builtin
2800 function; if convenient, the result should be placed in TARGET. */
2802 static rtx
2803 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2805 convert_optab builtin_optab;
2806 rtx op0;
2807 rtx_insn *insns;
2808 tree fndecl = get_callee_fndecl (exp);
2809 tree arg;
2810 machine_mode mode;
2811 enum built_in_function fallback_fn = BUILT_IN_NONE;
2813 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2814 return NULL_RTX;
2816 arg = CALL_EXPR_ARG (exp, 0);
2818 switch (DECL_FUNCTION_CODE (fndecl))
2820 CASE_FLT_FN (BUILT_IN_IRINT):
2821 fallback_fn = BUILT_IN_LRINT;
2822 gcc_fallthrough ();
2823 CASE_FLT_FN (BUILT_IN_LRINT):
2824 CASE_FLT_FN (BUILT_IN_LLRINT):
2825 builtin_optab = lrint_optab;
2826 break;
2828 CASE_FLT_FN (BUILT_IN_IROUND):
2829 fallback_fn = BUILT_IN_LROUND;
2830 gcc_fallthrough ();
2831 CASE_FLT_FN (BUILT_IN_LROUND):
2832 CASE_FLT_FN (BUILT_IN_LLROUND):
2833 builtin_optab = lround_optab;
2834 break;
2836 default:
2837 gcc_unreachable ();
2840 /* There's no easy way to detect the case we need to set EDOM. */
2841 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2842 return NULL_RTX;
2844 /* Make a suitable register to place result in. */
2845 mode = TYPE_MODE (TREE_TYPE (exp));
2847 /* We cannot detect the case that needs EDOM set, so expand inline only when errno handling is not required. */
2848 if (!flag_errno_math)
2850 rtx result = gen_reg_rtx (mode);
2852 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2853 need to expand the argument again. This way, we will not perform
2854 side-effects more than once. */
2855 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2857 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2859 start_sequence ();
2861 if (expand_sfix_optab (result, op0, builtin_optab))
2863 /* Output the entire sequence. */
2864 insns = get_insns ();
2865 end_sequence ();
2866 emit_insn (insns);
2867 return result;
2870 /* If we were unable to expand via the builtin, stop the sequence
2871 (without outputting the insns) and call the library function
2872 with the stabilized argument list. */
2873 end_sequence ();
2876 if (fallback_fn != BUILT_IN_NONE)
2878 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2879 targets, (int) round (x) should never be transformed into
2880 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2881 a call to lround in the hope that the target provides at least some
2882 C99 functions. This should result in the best user experience for
2883 targets lacking full C99 support. */
2884 tree fallback_fndecl = mathfn_built_in_1
2885 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2887 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2888 fallback_fndecl, 1, arg);
2890 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2891 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2892 return convert_to_mode (mode, target, 0);
2895 return expand_call (exp, target, target == const0_rtx);
2898 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2899 a normal call should be emitted rather than expanding the function
2900 in-line. EXP is the expression that is a call to the builtin
2901 function; if convenient, the result should be placed in TARGET. */
2903 static rtx
2904 expand_builtin_powi (tree exp, rtx target)
2906 tree arg0, arg1;
2907 rtx op0, op1;
2908 machine_mode mode;
2909 machine_mode mode2;
2911 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2912 return NULL_RTX;
2914 arg0 = CALL_EXPR_ARG (exp, 0);
2915 arg1 = CALL_EXPR_ARG (exp, 1);
2916 mode = TYPE_MODE (TREE_TYPE (exp));
2918 /* Emit a libcall to libgcc. */
2920 /* Mode of the 2nd argument must match that of an int. */
2921 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2923 if (target == NULL_RTX)
2924 target = gen_reg_rtx (mode);
2926 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2927 if (GET_MODE (op0) != mode)
2928 op0 = convert_to_mode (mode, op0, 0);
2929 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2930 if (GET_MODE (op1) != mode2)
2931 op1 = convert_to_mode (mode2, op1, 0);
2933 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2934 target, LCT_CONST, mode,
2935 op0, mode, op1, mode2);
2937 return target;
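/* Illustrative only: since no insn pattern is tried here, a call such as

     double y = __builtin_powi (x, n);

   is always expanded as a libcall to libgcc's helper (for example
   __powidf2 for double), with the exponent converted to int mode.  */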
2940 /* Expand expression EXP which is a call to the strlen builtin. Return
2941 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2942 try to get the result in TARGET, if convenient. */
2944 static rtx
2945 expand_builtin_strlen (tree exp, rtx target,
2946 machine_mode target_mode)
2948 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2949 return NULL_RTX;
2951 tree src = CALL_EXPR_ARG (exp, 0);
2953 /* If the length can be computed at compile-time, return it. */
2954 if (tree len = c_strlen (src, 0))
2955 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2957 /* If the length can be computed at compile-time and is a constant
2958 integer, but there are side-effects in src, evaluate
2959 src for side-effects, then return len.
2960 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2961 can be optimized into: i++; x = 3; */
2962 tree len = c_strlen (src, 1);
2963 if (len && TREE_CODE (len) == INTEGER_CST)
2965 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2966 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2969 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
2971 /* If SRC is not a pointer type, don't do this operation inline. */
2972 if (align == 0)
2973 return NULL_RTX;
2975 /* Bail out if we can't compute strlen in the right mode. */
2976 machine_mode insn_mode;
2977 enum insn_code icode = CODE_FOR_nothing;
2978 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2980 icode = optab_handler (strlen_optab, insn_mode);
2981 if (icode != CODE_FOR_nothing)
2982 break;
2984 if (insn_mode == VOIDmode)
2985 return NULL_RTX;
2987 /* Make a place to hold the source address. We will not expand
2988 the actual source until we are sure that the expansion will
2989 not fail -- there are trees that cannot be expanded twice. */
2990 rtx src_reg = gen_reg_rtx (Pmode);
2992 /* Mark the beginning of the strlen sequence so we can emit the
2993 source operand later. */
2994 rtx_insn *before_strlen = get_last_insn ();
2996 class expand_operand ops[4];
2997 create_output_operand (&ops[0], target, insn_mode);
2998 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2999 create_integer_operand (&ops[2], 0);
3000 create_integer_operand (&ops[3], align);
3001 if (!maybe_expand_insn (icode, 4, ops))
3002 return NULL_RTX;
3004 /* Check to see if the argument was declared attribute nonstring
3005 and if so, issue a warning since at this point it's not known
3006 to be nul-terminated. */
3007 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3009 /* Now that we are assured of success, expand the source. */
3010 start_sequence ();
3011 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3012 if (pat != src_reg)
3014 #ifdef POINTERS_EXTEND_UNSIGNED
3015 if (GET_MODE (pat) != Pmode)
3016 pat = convert_to_mode (Pmode, pat,
3017 POINTERS_EXTEND_UNSIGNED);
3018 #endif
3019 emit_move_insn (src_reg, pat);
3021 pat = get_insns ();
3022 end_sequence ();
3024 if (before_strlen)
3025 emit_insn_after (pat, before_strlen);
3026 else
3027 emit_insn_before (pat, get_insns ());
3029 /* Return the value in the proper mode for this function. */
3030 if (GET_MODE (ops[0].value) == target_mode)
3031 target = ops[0].value;
3032 else if (target != 0)
3033 convert_move (target, ops[0].value, 0);
3034 else
3035 target = convert_to_mode (target_mode, ops[0].value, 0);
3037 return target;
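/* A sketch of the expansion when a strlen insn pattern exists
   (illustrative only): the four operands created above are the result,
   the BLKmode memory holding the string, the constant 0 (the character
   searched for), and the known source alignment.  So

     n = strlen (p);

   with no compile-time length turns into a single target strlen insn,
   followed if necessary by a mode conversion of the result.  */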
3040 /* Expand call EXP to the strnlen built-in, returning the result
3041 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3043 static rtx
3044 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3046 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3047 return NULL_RTX;
3049 tree src = CALL_EXPR_ARG (exp, 0);
3050 tree bound = CALL_EXPR_ARG (exp, 1);
3052 if (!bound)
3053 return NULL_RTX;
3055 location_t loc = UNKNOWN_LOCATION;
3056 if (EXPR_HAS_LOCATION (exp))
3057 loc = EXPR_LOCATION (exp);
3059 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3060 so these conversions aren't necessary. */
3061 c_strlen_data lendata = { };
3062 tree len = c_strlen (src, 0, &lendata, 1);
3063 if (len)
3064 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3066 if (TREE_CODE (bound) == INTEGER_CST)
3068 if (!len)
3069 return NULL_RTX;
3071 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3072 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3075 if (TREE_CODE (bound) != SSA_NAME)
3076 return NULL_RTX;
3078 wide_int min, max;
3079 value_range r;
3080 get_global_range_query ()->range_of_expr (r, bound);
3081 if (r.kind () != VR_RANGE)
3082 return NULL_RTX;
3083 min = r.lower_bound ();
3084 max = r.upper_bound ();
3086 if (!len || TREE_CODE (len) != INTEGER_CST)
3088 bool exact;
3089 lendata.decl = unterminated_array (src, &len, &exact);
3090 if (!lendata.decl)
3091 return NULL_RTX;
3094 if (lendata.decl)
3095 return NULL_RTX;
3097 if (wi::gtu_p (min, wi::to_wide (len)))
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3100 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3101 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3104 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3105 bytes from the representation at DATA + OFFSET and return them reinterpreted as
3106 a target constant. */
3108 static rtx
3109 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3110 fixed_size_mode mode)
3112 /* The REPresentation pointed to by DATA need not be a nul-terminated
3113 string but the caller guarantees it's large enough for MODE. */
3114 const char *rep = (const char *) data;
3116 /* The by-pieces infrastructure does not try to pick a vector mode
3117 for memcpy expansion. */
3118 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3119 /*nul_terminated=*/false);
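/* Illustrative only: when store_by_pieces copies the constant "abcdefgh"
   in 4-byte chunks, this callback is invoked with OFFSET 0 and then 4
   and returns the corresponding immediates, so the expansion becomes two
   constant stores rather than a load/store loop.  The chunk sizes and
   count depend on the target's by-pieces parameters.  */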
3122 /* LEN specifies the length of the block for a memcpy/memset operation.
3123 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3124 In some cases we can make a very likely guess at the maximum size,
3125 which we then store in PROBABLE_MAX_SIZE. */
3127 static void
3128 determine_block_size (tree len, rtx len_rtx,
3129 unsigned HOST_WIDE_INT *min_size,
3130 unsigned HOST_WIDE_INT *max_size,
3131 unsigned HOST_WIDE_INT *probable_max_size)
3133 if (CONST_INT_P (len_rtx))
3135 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3136 return;
3138 else
3140 wide_int min, max;
3141 enum value_range_kind range_type = VR_UNDEFINED;
3143 /* Determine bounds from the type. */
3144 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3145 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3146 else
3147 *min_size = 0;
3148 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3149 *probable_max_size = *max_size
3150 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3151 else
3152 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3154 if (TREE_CODE (len) == SSA_NAME)
3156 value_range r;
3157 get_global_range_query ()->range_of_expr (r, len);
3158 range_type = r.kind ();
3159 if (range_type != VR_UNDEFINED)
3161 min = wi::to_wide (r.min ());
3162 max = wi::to_wide (r.max ());
3165 if (range_type == VR_RANGE)
3167 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3168 *min_size = min.to_uhwi ();
3169 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3170 *probable_max_size = *max_size = max.to_uhwi ();
3172 else if (range_type == VR_ANTI_RANGE)
3174 /* Code like
3176 int n;
3177 if (n < 100)
3178 memcpy (a, b, n)
3180 produces an anti-range allowing negative values of N. We can
3181 still use that information to guess that N is not negative. */
3183 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3184 *probable_max_size = min.to_uhwi () - 1;
3187 gcc_checking_assert (*max_size <=
3188 (unsigned HOST_WIDE_INT)
3189 GET_MODE_MASK (GET_MODE (len_rtx)));
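/* A hedged example of the above: for

     void f (unsigned n) { if (n < 100) memcpy (a, b, n); }

   where the recorded range of N at the call is [0, 99], MIN_SIZE becomes
   0 and MAX_SIZE / PROBABLE_MAX_SIZE become 99; with no range
   information the bounds fall back to the type's limits (or the mode
   mask), and a constant length sets all three values to that constant.  */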
3192 /* Expand a call EXP to the memcpy builtin.
3193 Return NULL_RTX if we failed, the caller should emit a normal call,
3194 otherwise try to get the result in TARGET, if convenient (and in
3195 mode MODE if that's convenient). */
3197 static rtx
3198 expand_builtin_memcpy (tree exp, rtx target)
3200 if (!validate_arglist (exp,
3201 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3202 return NULL_RTX;
3204 tree dest = CALL_EXPR_ARG (exp, 0);
3205 tree src = CALL_EXPR_ARG (exp, 1);
3206 tree len = CALL_EXPR_ARG (exp, 2);
3208 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3209 /*retmode=*/ RETURN_BEGIN, false);
3212 /* Expand a call EXP to the memmove built-in.
3213 Return NULL_RTX if we failed; the caller should emit a normal call. */
3215 static rtx
3216 expand_builtin_memmove (tree exp, rtx target)
3218 if (!validate_arglist (exp,
3219 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 return NULL_RTX;
3222 tree dest = CALL_EXPR_ARG (exp, 0);
3223 tree src = CALL_EXPR_ARG (exp, 1);
3224 tree len = CALL_EXPR_ARG (exp, 2);
3226 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3227 /*retmode=*/ RETURN_BEGIN, true);
3230 /* Expand a call EXP to the mempcpy builtin.
3231 Return NULL_RTX if we failed; the caller should emit a normal call,
3232 otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3235 static rtx
3236 expand_builtin_mempcpy (tree exp, rtx target)
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 return NULL_RTX;
3242 tree dest = CALL_EXPR_ARG (exp, 0);
3243 tree src = CALL_EXPR_ARG (exp, 1);
3244 tree len = CALL_EXPR_ARG (exp, 2);
3246 /* Policy does not generally allow using compute_objsize (which
3247 is used internally by check_memop_size) to change code generation
3248 or drive optimization decisions.
3250 In this instance it is safe because the code we generate has
3251 the same semantics regardless of the return value of
3252 check_memop_sizes. Exactly the same amount of data is copied
3253 and the return value is exactly the same in both cases.
3255 Furthermore, check_memop_size always uses mode 0 for the call to
3256 compute_objsize, so the imprecise nature of compute_objsize is
3257 avoided. */
3259 /* Avoid expanding mempcpy into memcpy when the call is determined
3260 to overflow the buffer. This also prevents the same overflow
3261 from being diagnosed again when expanding memcpy. */
3263 return expand_builtin_mempcpy_args (dest, src, len,
3264 target, exp, /*retmode=*/ RETURN_END);
3267 /* Helper function to do the actual work for expanding the memory copy family
3268 of functions (memcpy, mempcpy, stpcpy). The expansion should assign LEN bytes
3269 of memory from SRC to DEST and assign to TARGET if convenient. The return
3270 value is based on the RETMODE argument. */
3272 static rtx
3273 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3274 rtx target, tree exp, memop_ret retmode,
3275 bool might_overlap)
3277 unsigned int src_align = get_pointer_alignment (src);
3278 unsigned int dest_align = get_pointer_alignment (dest);
3279 rtx dest_mem, src_mem, dest_addr, len_rtx;
3280 HOST_WIDE_INT expected_size = -1;
3281 unsigned int expected_align = 0;
3282 unsigned HOST_WIDE_INT min_size;
3283 unsigned HOST_WIDE_INT max_size;
3284 unsigned HOST_WIDE_INT probable_max_size;
3286 bool is_move_done;
3288 /* If DEST is not a pointer type, call the normal function. */
3289 if (dest_align == 0)
3290 return NULL_RTX;
3292 /* If SRC is not a pointer type, don't do this
3293 operation in-line. */
3294 if (src_align == 0)
3295 return NULL_RTX;
3297 if (currently_expanding_gimple_stmt)
3298 stringop_block_profile (currently_expanding_gimple_stmt,
3299 &expected_align, &expected_size);
3301 if (expected_align < dest_align)
3302 expected_align = dest_align;
3303 dest_mem = get_memory_rtx (dest, len);
3304 set_mem_align (dest_mem, dest_align);
3305 len_rtx = expand_normal (len);
3306 determine_block_size (len, len_rtx, &min_size, &max_size,
3307 &probable_max_size);
3309 /* Try to get the byte representation of the constant SRC points to,
3310 with its byte size in NBYTES. */
3311 unsigned HOST_WIDE_INT nbytes;
3312 const char *rep = getbyterep (src, &nbytes);
3314 /* If the function's constant bound LEN_RTX is less than or equal
3315 to the byte size of the representation of the constant argument,
3316 and if block move would be done by pieces, we can avoid loading
3317 the bytes from memory and only store the computed constant.
3318 This works in the overlap (memmove) case as well because
3319 store_by_pieces just generates a series of stores of constants
3320 from the representation returned by getbyterep(). */
3321 if (rep
3322 && CONST_INT_P (len_rtx)
3323 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3324 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3325 CONST_CAST (char *, rep),
3326 dest_align, false))
3328 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3329 builtin_memcpy_read_str,
3330 CONST_CAST (char *, rep),
3331 dest_align, false, retmode);
3332 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3333 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3334 return dest_mem;
3337 src_mem = get_memory_rtx (src, len);
3338 set_mem_align (src_mem, src_align);
3340 /* Copy word part most expediently. */
3341 enum block_op_methods method = BLOCK_OP_NORMAL;
3342 if (CALL_EXPR_TAILCALL (exp)
3343 && (retmode == RETURN_BEGIN || target == const0_rtx))
3344 method = BLOCK_OP_TAILCALL;
3345 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3346 && retmode == RETURN_END
3347 && !might_overlap
3348 && target != const0_rtx);
3349 if (use_mempcpy_call)
3350 method = BLOCK_OP_NO_LIBCALL_RET;
3351 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3352 expected_align, expected_size,
3353 min_size, max_size, probable_max_size,
3354 use_mempcpy_call, &is_move_done,
3355 might_overlap);
3357 /* Bail out when a mempcpy call would be expanded as a libcall and the
3358 target provides a fast implementation
3359 of the mempcpy routine. */
3360 if (!is_move_done)
3361 return NULL_RTX;
3363 if (dest_addr == pc_rtx)
3364 return NULL_RTX;
3366 if (dest_addr == 0)
3368 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3369 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3372 if (retmode != RETURN_BEGIN && target != const0_rtx)
3374 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3375 /* For stpcpy, return a pointer to the last byte (the terminating NUL). */
3376 if (retmode == RETURN_END_MINUS_ONE)
3377 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3380 return dest_addr;
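/* Illustrative only: with a constant source and length, e.g.

     memcpy (buf, "abcd", 4);

   the store_by_pieces path above emits the four bytes as immediate
   stores without loading the string literal from memory; otherwise the
   copy goes through emit_block_move_hints using the size bounds computed
   by determine_block_size.  */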
3383 static rtx
3384 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3385 rtx target, tree orig_exp, memop_ret retmode)
3387 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3388 retmode, false);
3391 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3392 we failed; the caller should then emit a normal call. Otherwise try to
3393 get the result in TARGET, if convenient.
3394 Return value is based on RETMODE argument. */
3396 static rtx
3397 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3399 class expand_operand ops[3];
3400 rtx dest_mem;
3401 rtx src_mem;
3403 if (!targetm.have_movstr ())
3404 return NULL_RTX;
3406 dest_mem = get_memory_rtx (dest, NULL);
3407 src_mem = get_memory_rtx (src, NULL);
3408 if (retmode == RETURN_BEGIN)
3410 target = force_reg (Pmode, XEXP (dest_mem, 0));
3411 dest_mem = replace_equiv_address (dest_mem, target);
3414 create_output_operand (&ops[0],
3415 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3416 create_fixed_operand (&ops[1], dest_mem);
3417 create_fixed_operand (&ops[2], src_mem);
3418 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3419 return NULL_RTX;
3421 if (retmode != RETURN_BEGIN && target != const0_rtx)
3423 target = ops[0].value;
3424 /* movstr is supposed to set end to the address of the NUL
3425 terminator. If the caller requested a mempcpy-like return value,
3426 adjust it. */
3427 if (retmode == RETURN_END)
3429 rtx tem = plus_constant (GET_MODE (target),
3430 gen_lowpart (GET_MODE (target), target), 1);
3431 emit_move_insn (target, force_operand (tem, NULL_RTX));
3434 return target;
3437 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3438 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3439 try to get the result in TARGET, if convenient (and in mode MODE if that's
3440 convenient). */
3442 static rtx
3443 expand_builtin_strcpy (tree exp, rtx target)
3445 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3446 return NULL_RTX;
3448 tree dest = CALL_EXPR_ARG (exp, 0);
3449 tree src = CALL_EXPR_ARG (exp, 1);
3451 return expand_builtin_strcpy_args (exp, dest, src, target);
3454 /* Helper function to do the actual work for expand_builtin_strcpy. The
3455 arguments to the builtin_strcpy call DEST and SRC are broken out
3456 so that this can also be called without constructing an actual CALL_EXPR.
3457 The other arguments and return value are the same as for
3458 expand_builtin_strcpy. */
3460 static rtx
3461 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3463 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3466 /* Expand a call EXP to the stpcpy builtin.
3467 Return NULL_RTX if we failed; the caller should emit a normal call.
3468 Otherwise try to get the result in TARGET, if convenient (and in
3469 mode MODE if that's convenient). */
3471 static rtx
3472 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3474 tree dst, src;
3475 location_t loc = EXPR_LOCATION (exp);
3477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3478 return NULL_RTX;
3480 dst = CALL_EXPR_ARG (exp, 0);
3481 src = CALL_EXPR_ARG (exp, 1);
3483 /* If return value is ignored, transform stpcpy into strcpy. */
3484 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3486 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3487 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3488 return expand_expr (result, target, mode, EXPAND_NORMAL);
3490 else
3492 tree len, lenp1;
3493 rtx ret;
3495 /* Ensure we get an actual string whose length can be evaluated at
3496 compile-time, not an expression containing a string. This is
3497 because the latter will potentially produce pessimized code
3498 when used to produce the return value. */
3499 c_strlen_data lendata = { };
3500 if (!c_getstr (src)
3501 || !(len = c_strlen (src, 0, &lendata, 1)))
3502 return expand_movstr (dst, src, target,
3503 /*retmode=*/ RETURN_END_MINUS_ONE);
3505 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3506 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3507 target, exp,
3508 /*retmode=*/ RETURN_END_MINUS_ONE);
3510 if (ret)
3511 return ret;
3513 if (TREE_CODE (len) == INTEGER_CST)
3515 rtx len_rtx = expand_normal (len);
3517 if (CONST_INT_P (len_rtx))
3519 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3521 if (ret)
3523 if (! target)
3525 if (mode != VOIDmode)
3526 target = gen_reg_rtx (mode);
3527 else
3528 target = gen_reg_rtx (GET_MODE (ret));
3530 if (GET_MODE (target) != GET_MODE (ret))
3531 ret = gen_lowpart (GET_MODE (target), ret);
3533 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3534 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3535 gcc_assert (ret);
3537 return target;
3542 return expand_movstr (dst, src, target,
3543 /*retmode=*/ RETURN_END_MINUS_ONE);
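/* Editor's illustration (assumed example, not from this file): when the
   stpcpy result is unused, e.g.

     stpcpy (d, s);   // return value discarded

   the code above rewrites the call as strcpy (d, s), since strcpy is
   typically at least as fast and more widely optimized.  */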
3547 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3548 arguments while being careful to avoid duplicate warnings (which could
3549 be issued if the expander were to expand the call, resulting in it
3550 being emitted in expand_call()). */
3552 static rtx
3553 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3555 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3557 /* The call has been successfully expanded. Check for nonstring
3558 arguments and issue warnings as appropriate. */
3559 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3560 return ret;
3563 return NULL_RTX;
3566 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3567 bytes from constant string DATA + OFFSET and return it as target
3568 constant. */
3570 static rtx
3571 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3572 fixed_size_mode mode)
3574 const char *str = (const char *) data;
3576 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3577 return const0_rtx;
3579 /* The by-pieces infrastructure does not try to pick a vector mode
3580 for strncpy expansion. */
3581 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3584 /* Helper to check the sizes of sequences and the destination of calls
3585 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3586 success (no overflow or invalid sizes), false otherwise. */
3588 static bool
3589 check_strncat_sizes (tree exp, tree objsize)
3591 tree dest = CALL_EXPR_ARG (exp, 0);
3592 tree src = CALL_EXPR_ARG (exp, 1);
3593 tree maxread = CALL_EXPR_ARG (exp, 2);
3595 /* Try to determine the range of lengths that the source expression
3596 refers to. */
3597 c_strlen_data lendata = { };
3598 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3600 /* Try to verify that the destination is big enough for the shortest
3601 string. */
3603 access_data data (nullptr, exp, access_read_write, maxread, true);
3604 if (!objsize && warn_stringop_overflow)
3606 /* If it hasn't been provided by __strncat_chk, try to determine
3607 the size of the destination object into which the source is
3608 being copied. */
3609 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3612 /* Add one for the terminating nul. */
3613 tree srclen = (lendata.minlen
3614 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3615 size_one_node)
3616 : NULL_TREE);
3618 /* The strncat function copies at most MAXREAD bytes and always appends
3619 the terminating nul so the specified upper bound should never be equal
3620 to (or greater than) the size of the destination. */
3621 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3622 && tree_int_cst_equal (objsize, maxread))
3624 location_t loc = EXPR_LOCATION (exp);
3625 warning_at (loc, OPT_Wstringop_overflow_,
3626 "%qD specified bound %E equals destination size",
3627 get_callee_fndecl (exp), maxread);
3629 return false;
3632 if (!srclen
3633 || (maxread && tree_fits_uhwi_p (maxread)
3634 && tree_fits_uhwi_p (srclen)
3635 && tree_int_cst_lt (maxread, srclen)))
3636 srclen = maxread;
3638 /* The number of bytes to write is LEN but check_access will also
3639 check SRCLEN if LEN's value isn't known. */
3640 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3641 objsize, data.mode, &data);
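/* Editor's illustration (hypothetical example): the bound check above
   diagnoses calls such as

     char d[8] = "";
     strncat (d, s, sizeof d);   // bound equals destination size

   with -Wstringop-overflow, because strncat always appends a NUL beyond
   the bytes it copies, so the bound must leave room for the existing
   contents and the terminator.  */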
3644 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3645 NULL_RTX if we failed; the caller should emit a normal call. */
3647 static rtx
3648 expand_builtin_strncpy (tree exp, rtx target)
3650 location_t loc = EXPR_LOCATION (exp);
3652 if (!validate_arglist (exp,
3653 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3654 return NULL_RTX;
3655 tree dest = CALL_EXPR_ARG (exp, 0);
3656 tree src = CALL_EXPR_ARG (exp, 1);
3657 /* The number of bytes to write (not the maximum). */
3658 tree len = CALL_EXPR_ARG (exp, 2);
3660 /* The length of the source sequence. */
3661 tree slen = c_strlen (src, 1);
3663 /* We must be passed a constant len and src parameter. */
3664 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3665 return NULL_RTX;
3667 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3669 /* We're required to pad with trailing zeros if the requested
3670 len is greater than strlen(s2)+1. In that case try to
3671 use store_by_pieces; if it fails, punt. */
3672 if (tree_int_cst_lt (slen, len))
3674 unsigned int dest_align = get_pointer_alignment (dest);
3675 const char *p = c_getstr (src);
3676 rtx dest_mem;
3678 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3679 || !can_store_by_pieces (tree_to_uhwi (len),
3680 builtin_strncpy_read_str,
3681 CONST_CAST (char *, p),
3682 dest_align, false))
3683 return NULL_RTX;
3685 dest_mem = get_memory_rtx (dest, len);
3686 store_by_pieces (dest_mem, tree_to_uhwi (len),
3687 builtin_strncpy_read_str,
3688 CONST_CAST (char *, p), dest_align, false,
3689 RETURN_BEGIN);
3690 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3691 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3692 return dest_mem;
3695 return NULL_RTX;
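/* Editor's illustration (assumed example): with a constant source and
   bound such as

     char buf[8];
     strncpy (buf, "ab", sizeof buf);

   the bound (8) exceeds strlen ("ab") + 1, so the code above stores the
   bytes 'a', 'b' and six trailing NULs by pieces instead of emitting a
   library call.  */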
3698 /* Return the RTL of a register in MODE generated from PREV in the
3699 previous iteration. */
3701 static rtx
3702 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3704 rtx target = nullptr;
3705 if (prev != nullptr && prev->data != nullptr)
3707 /* Use the previous data in the same mode. */
3708 if (prev->mode == mode)
3709 return prev->data;
3711 fixed_size_mode prev_mode = prev->mode;
3713 /* Don't use the previous data to write QImode if it is in a
3714 vector mode. */
3715 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3716 return target;
3718 rtx prev_rtx = prev->data;
3720 if (REG_P (prev_rtx)
3721 && HARD_REGISTER_P (prev_rtx)
3722 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3724 /* This case occurs when PREV_MODE is a vector and when
3725 MODE is too small to store using vector operations.
3726 After register allocation, the code will need to move the
3727 lowpart of the vector register into a non-vector register.
3729 Also, the target has chosen to use a hard register
3730 instead of going with the default choice of using a
3731 pseudo register. We should respect that choice and try to
3732 avoid creating a pseudo register with the same mode as the
3733 current hard register.
3735 In principle, we could just use a lowpart MODE subreg of
3736 the vector register. However, the vector register mode might
3737 be too wide for non-vector registers, and we already know
3738 that the non-vector mode is too small for vector registers.
3739 It's therefore likely that we'd need to spill to memory in
3740 the vector mode and reload the non-vector value from there.
3742 Try to avoid that by reducing the vector register to the
3743 smallest size that it can hold. This should increase the
3744 chances that non-vector registers can hold both the inner
3745 and outer modes of the subreg that we generate later. */
3746 machine_mode m;
3747 fixed_size_mode candidate;
3748 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3749 if (is_a<fixed_size_mode> (m, &candidate))
3751 if (GET_MODE_SIZE (candidate)
3752 >= GET_MODE_SIZE (prev_mode))
3753 break;
3754 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3755 && lowpart_subreg_regno (REGNO (prev_rtx),
3756 prev_mode, candidate) >= 0)
3758 target = lowpart_subreg (candidate, prev_rtx,
3759 prev_mode);
3760 prev_rtx = target;
3761 prev_mode = candidate;
3762 break;
3765 if (target == nullptr)
3766 prev_rtx = copy_to_reg (prev_rtx);
3769 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3771 return target;
3774 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3776 constant. If PREV isn't nullptr, it has the RTL info from the
3777 previous iteration. */
3780 builtin_memset_read_str (void *data, void *prev,
3781 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3782 fixed_size_mode mode)
3784 const char *c = (const char *) data;
3785 unsigned int size = GET_MODE_SIZE (mode);
3787 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3788 mode);
3789 if (target != nullptr)
3790 return target;
3791 rtx src = gen_int_mode (*c, QImode);
3793 if (VECTOR_MODE_P (mode))
3795 gcc_assert (GET_MODE_INNER (mode) == QImode);
3797 rtx const_vec = gen_const_vec_duplicate (mode, src);
3798 if (prev == NULL)
3799 /* Return CONST_VECTOR when called by a query function. */
3800 return const_vec;
3802 /* Use the move expander with CONST_VECTOR. */
3803 target = targetm.gen_memset_scratch_rtx (mode);
3804 emit_move_insn (target, const_vec);
3805 return target;
3808 char *p = XALLOCAVEC (char, size);
3810 memset (p, *c, size);
3812 /* Vector modes should be handled above. */
3813 return c_readstr (p, as_a <scalar_int_mode> (mode));
3816 /* Callback routine for store_by_pieces. Return the RTL of a register
3817 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3818 char value given in the RTL register data. For example, if mode is
3819 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3820 nullptr, it has the RTL info from the previous iteration. */
3822 static rtx
3823 builtin_memset_gen_str (void *data, void *prev,
3824 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3825 fixed_size_mode mode)
3827 rtx target, coeff;
3828 size_t size;
3829 char *p;
3831 size = GET_MODE_SIZE (mode);
3832 if (size == 1)
3833 return (rtx) data;
3835 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3836 if (target != nullptr)
3837 return target;
3839 if (VECTOR_MODE_P (mode))
3841 gcc_assert (GET_MODE_INNER (mode) == QImode);
3843 /* vec_duplicate_optab is a precondition to pick a vector mode for
3844 the memset expander. */
3845 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3847 target = targetm.gen_memset_scratch_rtx (mode);
3848 class expand_operand ops[2];
3849 create_output_operand (&ops[0], target, mode);
3850 create_input_operand (&ops[1], (rtx) data, QImode);
3851 expand_insn (icode, 2, ops);
3852 if (!rtx_equal_p (target, ops[0].value))
3853 emit_move_insn (target, ops[0].value);
3855 return target;
3858 p = XALLOCAVEC (char, size);
3859 memset (p, 1, size);
3860 /* Vector modes should be handled above. */
3861 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3863 target = convert_to_mode (mode, (rtx) data, 1);
3864 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3865 return force_reg (mode, target);
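/* Editor's note (illustrative, not part of the original source): for
   non-vector modes the replication above is the classic multiply by
   0x0101...01 trick; in plain C terms, for a 4-byte mode:

     unsigned int
     replicate_byte_4 (unsigned char c)   // hypothetical helper name
     {
       return c * 0x01010101u;            // 0xab becomes 0xabababab
     }
*/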
3868 /* Expand expression EXP, which is a call to the memset builtin. Return
3869 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3870 try to get the result in TARGET, if convenient (and in mode MODE if that's
3871 convenient). */
3873 static rtx
3874 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3876 if (!validate_arglist (exp,
3877 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3878 return NULL_RTX;
3880 tree dest = CALL_EXPR_ARG (exp, 0);
3881 tree val = CALL_EXPR_ARG (exp, 1);
3882 tree len = CALL_EXPR_ARG (exp, 2);
3884 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3887 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3888 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3889 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3890 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3892 The strategy is to issue one store_by_pieces for each power of two,
3893 from most to least significant, guarded by a test on whether there
3894 are at least that many bytes left to copy in LEN.
3896 ??? Should we skip some powers of two in favor of loops? Maybe start
3897 at the max of TO/LEN/word alignment, at least when optimizing for
3898 size, instead of ensuring O(log len) dynamic compares? */
3900 bool
3901 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
3902 unsigned HOST_WIDE_INT min_len,
3903 unsigned HOST_WIDE_INT max_len,
3904 rtx val, char valc, unsigned int align)
3906 int max_bits = floor_log2 (max_len);
3907 int min_bits = floor_log2 (min_len);
3908 int sctz_len = ctz_len;
3910 gcc_checking_assert (sctz_len >= 0);
3912 if (val)
3913 valc = 1;
3915 /* Bits more significant than TST_BITS are part of the shared prefix
3916 in the binary representation of both min_len and max_len. Since
3917 they're identical, we don't need to test them in the loop. */
3918 int tst_bits = (max_bits != min_bits ? max_bits
3919 : floor_log2 (max_len ^ min_len));
3921 /* Check whether it's profitable to start by storing a fixed BLKSIZE
3922 bytes, to lower max_bits. In the unlikely case of a constant LEN
3923 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
3924 single store_by_pieces, but otherwise, select the minimum multiple
3925 of the ALIGN (in bytes) and of the GCD of the possible LENs that
3926 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
3927 unsigned HOST_WIDE_INT blksize;
3928 if (max_len > min_len)
3930 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
3931 align / BITS_PER_UNIT);
3932 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
3933 blksize &= ~(alrng - 1);
3935 else if (max_len == min_len)
3936 blksize = max_len;
3937 else
3938 gcc_unreachable ();
3939 if (min_len >= blksize)
3941 min_len -= blksize;
3942 min_bits = floor_log2 (min_len);
3943 max_len -= blksize;
3944 max_bits = floor_log2 (max_len);
3946 tst_bits = (max_bits != min_bits ? max_bits
3947 : floor_log2 (max_len ^ min_len));
3949 else
3950 blksize = 0;
3952 /* Check that we can use store by pieces for the maximum store count
3953 we may issue (initial fixed-size block, plus conditional
3954 power-of-two-sized stores from max_bits to ctz_len). */
3955 unsigned HOST_WIDE_INT xlenest = blksize;
3956 if (max_bits >= 0)
3957 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
3958 - (HOST_WIDE_INT_1U << ctz_len));
3959 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
3960 &valc, align, true))
3961 return false;
3963 by_pieces_constfn constfun;
3964 void *constfundata;
3965 if (val)
3967 constfun = builtin_memset_gen_str;
3968 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
3969 val);
3971 else
3973 constfun = builtin_memset_read_str;
3974 constfundata = &valc;
3977 rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0));
3978 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
3979 to = replace_equiv_address (to, ptr);
3980 set_mem_align (to, align);
3982 if (blksize)
3984 to = store_by_pieces (to, blksize,
3985 constfun, constfundata,
3986 align, true,
3987 max_len != 0 ? RETURN_END : RETURN_BEGIN);
3988 if (max_len == 0)
3989 return true;
3991 /* Adjust PTR, TO and REM. Since TO's address is likely
3992 PTR+offset, we have to replace it. */
3993 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
3994 to = replace_equiv_address (to, ptr);
3995 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
3996 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
3999 /* Iterate over power-of-two block sizes from the maximum length to
4000 the least significant bit possibly set in the length. */
4001 for (int i = max_bits; i >= sctz_len; i--)
4003 rtx_code_label *label = NULL;
4004 blksize = HOST_WIDE_INT_1U << i;
4006 /* If we're past the bits shared between min_ and max_len, expand
4007 a test on the dynamic length, comparing it with the
4008 BLKSIZE. */
4009 if (i <= tst_bits)
4011 label = gen_label_rtx ();
4012 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4013 ptr_mode, 1, label,
4014 profile_probability::even ());
4016 /* If we are at a bit that is in the prefix shared by min_ and
4017 max_len, skip this BLKSIZE if the bit is clear. */
4018 else if ((max_len & blksize) == 0)
4019 continue;
4021 /* Issue a store of BLKSIZE bytes. */
4022 to = store_by_pieces (to, blksize,
4023 constfun, constfundata,
4024 align, true,
4025 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4027 /* Adjust REM and PTR, unless this is the last iteration. */
4028 if (i != sctz_len)
4030 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4031 to = replace_equiv_address (to, ptr);
4032 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4033 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4036 if (label)
4038 emit_label (label);
4040 /* Given conditional stores, the offset can no longer be
4041 known, so clear it. */
4042 clear_mem_offset (to);
4046 return true;
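/* Editor's sketch (illustrative, simplified): for a memset whose length
   is only known to lie in [MIN_LEN, MAX_LEN], the code above behaves
   roughly like the following source-level shape (for max_bits == 4 and
   ctz_len == 0):

     if (rem >= 16) { store 16 bytes; ptr += 16; rem -= 16; }
     if (rem >=  8) { store  8 bytes; ptr +=  8; rem -=  8; }
     if (rem >=  4) { store  4 bytes; ptr +=  4; rem -=  4; }
     if (rem >=  2) { store  2 bytes; ptr +=  2; rem -=  2; }
     if (rem >=  1) { store  1 byte; }

   with the guarding tests omitted for block sizes covered by the shared
   prefix of MIN_LEN and MAX_LEN.  */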
4049 /* Helper function to do the actual work for expand_builtin_memset. The
4050 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4051 so that this can also be called without constructing an actual CALL_EXPR.
4052 The other arguments and return value are the same as for
4053 expand_builtin_memset. */
4055 static rtx
4056 expand_builtin_memset_args (tree dest, tree val, tree len,
4057 rtx target, machine_mode mode, tree orig_exp)
4059 tree fndecl, fn;
4060 enum built_in_function fcode;
4061 machine_mode val_mode;
4062 char c;
4063 unsigned int dest_align;
4064 rtx dest_mem, dest_addr, len_rtx;
4065 HOST_WIDE_INT expected_size = -1;
4066 unsigned int expected_align = 0;
4067 unsigned HOST_WIDE_INT min_size;
4068 unsigned HOST_WIDE_INT max_size;
4069 unsigned HOST_WIDE_INT probable_max_size;
4071 dest_align = get_pointer_alignment (dest);
4073 /* If DEST is not a pointer type, don't do this operation in-line. */
4074 if (dest_align == 0)
4075 return NULL_RTX;
4077 if (currently_expanding_gimple_stmt)
4078 stringop_block_profile (currently_expanding_gimple_stmt,
4079 &expected_align, &expected_size);
4081 if (expected_align < dest_align)
4082 expected_align = dest_align;
4084 /* If the LEN parameter is zero, return DEST. */
4085 if (integer_zerop (len))
4087 /* Evaluate and ignore VAL in case it has side-effects. */
4088 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4089 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4092 /* Stabilize the arguments in case we fail. */
4093 dest = builtin_save_expr (dest);
4094 val = builtin_save_expr (val);
4095 len = builtin_save_expr (len);
4097 len_rtx = expand_normal (len);
4098 determine_block_size (len, len_rtx, &min_size, &max_size,
4099 &probable_max_size);
4100 dest_mem = get_memory_rtx (dest, len);
4101 val_mode = TYPE_MODE (unsigned_char_type_node);
4103 if (TREE_CODE (val) != INTEGER_CST
4104 || target_char_cast (val, &c))
4106 rtx val_rtx;
4108 val_rtx = expand_normal (val);
4109 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4111 /* Assume that we can memset by pieces if we can store
4112 * the coefficients by pieces (in the required modes).
4113 * We can't pass builtin_memset_gen_str as that emits RTL. */
4114 c = 1;
4115 if (tree_fits_uhwi_p (len)
4116 && can_store_by_pieces (tree_to_uhwi (len),
4117 builtin_memset_read_str, &c, dest_align,
4118 true))
4120 val_rtx = force_reg (val_mode, val_rtx);
4121 store_by_pieces (dest_mem, tree_to_uhwi (len),
4122 builtin_memset_gen_str, val_rtx, dest_align,
4123 true, RETURN_BEGIN);
4125 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4126 dest_align, expected_align,
4127 expected_size, min_size, max_size,
4128 probable_max_size)
4129 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4130 tree_ctz (len),
4131 min_size, max_size,
4132 val_rtx, 0,
4133 dest_align))
4134 goto do_libcall;
4136 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4137 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4138 return dest_mem;
4141 if (c)
4143 if (tree_fits_uhwi_p (len)
4144 && can_store_by_pieces (tree_to_uhwi (len),
4145 builtin_memset_read_str, &c, dest_align,
4146 true))
4147 store_by_pieces (dest_mem, tree_to_uhwi (len),
4148 builtin_memset_read_str, &c, dest_align, true,
4149 RETURN_BEGIN);
4150 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4151 gen_int_mode (c, val_mode),
4152 dest_align, expected_align,
4153 expected_size, min_size, max_size,
4154 probable_max_size)
4155 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4156 tree_ctz (len),
4157 min_size, max_size,
4158 NULL_RTX, c,
4159 dest_align))
4160 goto do_libcall;
4162 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4163 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4164 return dest_mem;
4167 set_mem_align (dest_mem, dest_align);
4168 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4169 CALL_EXPR_TAILCALL (orig_exp)
4170 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4171 expected_align, expected_size,
4172 min_size, max_size,
4173 probable_max_size, tree_ctz (len));
4175 if (dest_addr == 0)
4177 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4178 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4181 return dest_addr;
4183 do_libcall:
4184 fndecl = get_callee_fndecl (orig_exp);
4185 fcode = DECL_FUNCTION_CODE (fndecl);
4186 if (fcode == BUILT_IN_MEMSET)
4187 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4188 dest, val, len);
4189 else if (fcode == BUILT_IN_BZERO)
4190 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4191 dest, len);
4192 else
4193 gcc_unreachable ();
4194 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4195 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4196 return expand_call (fn, target, target == const0_rtx);
4199 /* Expand expression EXP, which is a call to the bzero builtin. Return
4200 NULL_RTX if we failed; the caller should emit a normal call. */
4202 static rtx
4203 expand_builtin_bzero (tree exp)
4205 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4206 return NULL_RTX;
4208 tree dest = CALL_EXPR_ARG (exp, 0);
4209 tree size = CALL_EXPR_ARG (exp, 1);
4211 /* New argument list transforming bzero(ptr x, int y) to
4212 memset(ptr x, int 0, size_t y). This is done this way
4213 so that if it isn't expanded inline, we fall back to
4214 calling bzero instead of memset. */
4216 location_t loc = EXPR_LOCATION (exp);
4218 return expand_builtin_memset_args (dest, integer_zero_node,
4219 fold_convert_loc (loc,
4220 size_type_node, size),
4221 const0_rtx, VOIDmode, exp);
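/* Editor's illustration (hypothetical call): a source-level

     bzero (p, n);

   is expanded here with the same argument shape as memset (p, 0, n),
   but EXP itself is passed as ORIG_EXP so that, if inline expansion
   fails, the fallback library call is still to bzero rather than to
   memset.  */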
4224 /* Try to expand cmpstr operation ICODE with the given operands.
4225 Return the result rtx on success, otherwise return null. */
4227 static rtx
4228 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4229 HOST_WIDE_INT align)
4231 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4233 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4234 target = NULL_RTX;
4236 class expand_operand ops[4];
4237 create_output_operand (&ops[0], target, insn_mode);
4238 create_fixed_operand (&ops[1], arg1_rtx);
4239 create_fixed_operand (&ops[2], arg2_rtx);
4240 create_integer_operand (&ops[3], align);
4241 if (maybe_expand_insn (icode, 4, ops))
4242 return ops[0].value;
4243 return NULL_RTX;
4246 /* Expand expression EXP, which is a call to the memcmp built-in function.
4247 Return NULL_RTX if we failed and the caller should emit a normal call,
4248 otherwise try to get the result in TARGET, if convenient.
4249 RESULT_EQ is true if we can relax the returned value to be either zero
4250 or nonzero, without caring about the sign. */
4252 static rtx
4253 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4255 if (!validate_arglist (exp,
4256 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX;
4259 tree arg1 = CALL_EXPR_ARG (exp, 0);
4260 tree arg2 = CALL_EXPR_ARG (exp, 1);
4261 tree len = CALL_EXPR_ARG (exp, 2);
4263 /* Due to the performance benefit, always inline the calls first
4264 when result_eq is false. */
4265 rtx result = NULL_RTX;
4266 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4267 if (!result_eq && fcode != BUILT_IN_BCMP)
4269 result = inline_expand_builtin_bytecmp (exp, target);
4270 if (result)
4271 return result;
4274 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4275 location_t loc = EXPR_LOCATION (exp);
4277 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4278 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4280 /* If we don't have POINTER_TYPE, call the function. */
4281 if (arg1_align == 0 || arg2_align == 0)
4282 return NULL_RTX;
4284 rtx arg1_rtx = get_memory_rtx (arg1, len);
4285 rtx arg2_rtx = get_memory_rtx (arg2, len);
4286 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4288 /* Set MEM_SIZE as appropriate. */
4289 if (CONST_INT_P (len_rtx))
4291 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4292 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4295 by_pieces_constfn constfn = NULL;
4297 /* Try to get the byte representation of the constant that ARG2 (or,
4298 only when the function's result is used for equality to zero, ARG1)
4299 points to, with its byte size in NBYTES. */
4300 unsigned HOST_WIDE_INT nbytes;
4301 const char *rep = getbyterep (arg2, &nbytes);
4302 if (result_eq && rep == NULL)
4304 /* For equality to zero the arguments are interchangeable. */
4305 rep = getbyterep (arg1, &nbytes);
4306 if (rep != NULL)
4307 std::swap (arg1_rtx, arg2_rtx);
4310 /* If the function's constant bound LEN_RTX is less than or equal
4311 to the byte size of the representation of the constant argument,
4312 and if block move would be done by pieces, we can avoid loading
4313 the bytes from memory and only store the computed constant result. */
4314 if (rep
4315 && CONST_INT_P (len_rtx)
4316 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4317 constfn = builtin_memcpy_read_str;
4319 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4320 TREE_TYPE (len), target,
4321 result_eq, constfn,
4322 CONST_CAST (char *, rep));
4324 if (result)
4326 /* Return the value in the proper mode for this function. */
4327 if (GET_MODE (result) == mode)
4328 return result;
4330 if (target != 0)
4332 convert_move (target, result, 0);
4333 return target;
4336 return convert_to_mode (mode, result, 0);
4339 return NULL_RTX;
4342 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4343 if we failed; the caller should emit a normal call. Otherwise try to get
4344 the result in TARGET, if convenient. */
4346 static rtx
4347 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4349 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4350 return NULL_RTX;
4352 tree arg1 = CALL_EXPR_ARG (exp, 0);
4353 tree arg2 = CALL_EXPR_ARG (exp, 1);
4355 /* Due to the performance benefit, always inline the calls first. */
4356 rtx result = NULL_RTX;
4357 result = inline_expand_builtin_bytecmp (exp, target);
4358 if (result)
4359 return result;
4361 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4362 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4363 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4364 return NULL_RTX;
4366 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4367 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4369 /* If we don't have POINTER_TYPE, call the function. */
4370 if (arg1_align == 0 || arg2_align == 0)
4371 return NULL_RTX;
4373 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4374 arg1 = builtin_save_expr (arg1);
4375 arg2 = builtin_save_expr (arg2);
4377 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4378 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4380 /* Try to call cmpstrsi. */
4381 if (cmpstr_icode != CODE_FOR_nothing)
4382 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4383 MIN (arg1_align, arg2_align));
4385 /* Try to determine at least one length and call cmpstrnsi. */
4386 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4388 tree len;
4389 rtx arg3_rtx;
4391 tree len1 = c_strlen (arg1, 1);
4392 tree len2 = c_strlen (arg2, 1);
4394 if (len1)
4395 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4396 if (len2)
4397 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4399 /* If we don't have a constant length for the first, use the length
4400 of the second, if we know it. We don't require a constant for
4401 this case; some cost analysis could be done if both are available
4402 but neither is constant. For now, assume they're equally cheap,
4403 unless one has side effects. If both strings have constant lengths,
4404 use the smaller. */
4406 if (!len1)
4407 len = len2;
4408 else if (!len2)
4409 len = len1;
4410 else if (TREE_SIDE_EFFECTS (len1))
4411 len = len2;
4412 else if (TREE_SIDE_EFFECTS (len2))
4413 len = len1;
4414 else if (TREE_CODE (len1) != INTEGER_CST)
4415 len = len2;
4416 else if (TREE_CODE (len2) != INTEGER_CST)
4417 len = len1;
4418 else if (tree_int_cst_lt (len1, len2))
4419 len = len1;
4420 else
4421 len = len2;
4423 /* If both arguments have side effects, we cannot optimize. */
4424 if (len && !TREE_SIDE_EFFECTS (len))
4426 arg3_rtx = expand_normal (len);
4427 result = expand_cmpstrn_or_cmpmem
4428 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4429 arg3_rtx, MIN (arg1_align, arg2_align));
4433 tree fndecl = get_callee_fndecl (exp);
4434 if (result)
4436 /* Return the value in the proper mode for this function. */
4437 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4438 if (GET_MODE (result) == mode)
4439 return result;
4440 if (target == 0)
4441 return convert_to_mode (mode, result, 0);
4442 convert_move (target, result, 0);
4443 return target;
4446 /* Expand the library call ourselves using a stabilized argument
4447 list to avoid re-evaluating the function's arguments twice. */
4448 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4449 copy_warning (fn, exp);
4450 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4451 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4452 return expand_call (fn, target, target == const0_rtx);
4455 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4456 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4457 try to get the result in TARGET, if convenient. */
4459 static rtx
4460 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4461 ATTRIBUTE_UNUSED machine_mode mode)
4463 if (!validate_arglist (exp,
4464 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4465 return NULL_RTX;
4467 tree arg1 = CALL_EXPR_ARG (exp, 0);
4468 tree arg2 = CALL_EXPR_ARG (exp, 1);
4469 tree arg3 = CALL_EXPR_ARG (exp, 2);
4471 location_t loc = EXPR_LOCATION (exp);
4472 tree len1 = c_strlen (arg1, 1);
4473 tree len2 = c_strlen (arg2, 1);
4475 /* Due to the performance benefit, always inline the calls first. */
4476 rtx result = NULL_RTX;
4477 result = inline_expand_builtin_bytecmp (exp, target);
4478 if (result)
4479 return result;
4481 /* If c_strlen can determine an expression for one of the string
4482 lengths, and it doesn't have side effects, then emit cmpstrnsi
4483 using length MIN(strlen(string)+1, arg3). */
4484 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4485 if (cmpstrn_icode == CODE_FOR_nothing)
4486 return NULL_RTX;
4488 tree len;
4490 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4491 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4493 if (len1)
4494 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4495 if (len2)
4496 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4498 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4500 /* If we don't have a constant length for the first, use the length
4501 of the second, if we know it. If neither string is constant length,
4502 use the given length argument. We don't require a constant for
4503 this case; some cost analysis could be done if both are available
4504 but neither is constant. For now, assume they're equally cheap,
4505 unless one has side effects. If both strings have constant lengths,
4506 use the smaller. */
4508 if (!len1 && !len2)
4509 len = len3;
4510 else if (!len1)
4511 len = len2;
4512 else if (!len2)
4513 len = len1;
4514 else if (TREE_SIDE_EFFECTS (len1))
4515 len = len2;
4516 else if (TREE_SIDE_EFFECTS (len2))
4517 len = len1;
4518 else if (TREE_CODE (len1) != INTEGER_CST)
4519 len = len2;
4520 else if (TREE_CODE (len2) != INTEGER_CST)
4521 len = len1;
4522 else if (tree_int_cst_lt (len1, len2))
4523 len = len1;
4524 else
4525 len = len2;
4527 /* If we are not using the given length, we must incorporate it here.
4528 The actual new length parameter will be MIN(len,arg3) in this case. */
4529 if (len != len3)
4531 len = fold_convert_loc (loc, sizetype, len);
4532 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4534 rtx arg1_rtx = get_memory_rtx (arg1, len);
4535 rtx arg2_rtx = get_memory_rtx (arg2, len);
4536 rtx arg3_rtx = expand_normal (len);
4537 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4538 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4539 MIN (arg1_align, arg2_align));
4541 tree fndecl = get_callee_fndecl (exp);
4542 if (result)
4544 /* Return the value in the proper mode for this function. */
4545 mode = TYPE_MODE (TREE_TYPE (exp));
4546 if (GET_MODE (result) == mode)
4547 return result;
4548 if (target == 0)
4549 return convert_to_mode (mode, result, 0);
4550 convert_move (target, result, 0);
4551 return target;
4554 /* Expand the library call ourselves using a stabilized argument
4555 list to avoid re-evaluating the function's arguments twice. */
4556 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4557 copy_warning (call, exp);
4558 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4559 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4560 return expand_call (call, target, target == const0_rtx);
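/* Editor's illustration (assumed example): for a call such as

     strncmp (s, "abc", n);

   c_strlen determines the literal's length 3, so the comparison length
   handed to cmpstrnsi is MIN (4, n) -- strlen ("abc") + 1 capped by the
   caller's bound -- since no byte past the first NUL can change the
   result.  */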
4563 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4564 if that's convenient. */
4566 rtx
4567 expand_builtin_saveregs (void)
4569 rtx val;
4570 rtx_insn *seq;
4572 /* Don't do __builtin_saveregs more than once in a function.
4573 Save the result of the first call and reuse it. */
4574 if (saveregs_value != 0)
4575 return saveregs_value;
4577 /* When this function is called, it means that registers must be
4578 saved on entry to this function. So we migrate the call to the
4579 first insn of this function. */
4581 start_sequence ();
4583 /* Do whatever the machine needs done in this case. */
4584 val = targetm.calls.expand_builtin_saveregs ();
4586 seq = get_insns ();
4587 end_sequence ();
4589 saveregs_value = val;
4591 /* Put the insns after the NOTE that starts the function. If this
4592 is inside a start_sequence, make the outer-level insn chain current, so
4593 the code is placed at the start of the function. */
4594 push_topmost_sequence ();
4595 emit_insn_after (seq, entry_of_function ());
4596 pop_topmost_sequence ();
4598 return val;
4601 /* Expand a call to __builtin_next_arg. */
4603 static rtx
4604 expand_builtin_next_arg (void)
4606 /* Checking arguments is already done in fold_builtin_next_arg,
4607 which must be called before this function. */
4608 return expand_binop (ptr_mode, add_optab,
4609 crtl->args.internal_arg_pointer,
4610 crtl->args.arg_offset_rtx,
4611 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4614 /* Make it easier for the backends by protecting the valist argument
4615 from multiple evaluations. */
4617 static tree
4618 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4620 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4622 /* The current way of determining the type of valist is completely
4623 bogus. We should have the information on the va builtin instead. */
4624 if (!vatype)
4625 vatype = targetm.fn_abi_va_list (cfun->decl);
4627 if (TREE_CODE (vatype) == ARRAY_TYPE)
4629 if (TREE_SIDE_EFFECTS (valist))
4630 valist = save_expr (valist);
4632 /* For this case, the backends will be expecting a pointer to
4633 vatype, but it's possible we've actually been given an array
4634 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4635 So fix it. */
4636 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4638 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4639 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4642 else
4644 tree pt = build_pointer_type (vatype);
4646 if (! needs_lvalue)
4648 if (! TREE_SIDE_EFFECTS (valist))
4649 return valist;
4651 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4652 TREE_SIDE_EFFECTS (valist) = 1;
4655 if (TREE_SIDE_EFFECTS (valist))
4656 valist = save_expr (valist);
4657 valist = fold_build2_loc (loc, MEM_REF,
4658 vatype, valist, build_int_cst (pt, 0));
4661 return valist;
4664 /* The "standard" definition of va_list is void*. */
4666 tree
4667 std_build_builtin_va_list (void)
4669 return ptr_type_node;
4672 /* The "standard" abi va_list is va_list_type_node. */
4674 tree
4675 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4677 return va_list_type_node;
4680 /* The "standard" type of va_list is va_list_type_node. */
4682 tree
4683 std_canonical_va_list_type (tree type)
4685 tree wtype, htype;
4687 wtype = va_list_type_node;
4688 htype = type;
4690 if (TREE_CODE (wtype) == ARRAY_TYPE)
4692 /* If va_list is an array type, the argument may have decayed
4693 to a pointer type, e.g. by being passed to another function.
4694 In that case, unwrap both types so that we can compare the
4695 underlying records. */
4696 if (TREE_CODE (htype) == ARRAY_TYPE
4697 || POINTER_TYPE_P (htype))
4699 wtype = TREE_TYPE (wtype);
4700 htype = TREE_TYPE (htype);
4703 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4704 return va_list_type_node;
4706 return NULL_TREE;
4709 /* The "standard" implementation of va_start: just assign `nextarg' to
4710 the variable. */
4712 void
4713 std_expand_builtin_va_start (tree valist, rtx nextarg)
4715 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4716 convert_move (va_r, nextarg, 0);
4719 /* Expand EXP, a call to __builtin_va_start. */
4721 static rtx
4722 expand_builtin_va_start (tree exp)
4724 rtx nextarg;
4725 tree valist;
4726 location_t loc = EXPR_LOCATION (exp);
4728 if (call_expr_nargs (exp) < 2)
4730 error_at (loc, "too few arguments to function %<va_start%>");
4731 return const0_rtx;
4734 if (fold_builtin_next_arg (exp, true))
4735 return const0_rtx;
4737 nextarg = expand_builtin_next_arg ();
4738 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4740 if (targetm.expand_builtin_va_start)
4741 targetm.expand_builtin_va_start (valist, nextarg);
4742 else
4743 std_expand_builtin_va_start (valist, nextarg);
4745 return const0_rtx;
4748 /* Expand EXP, a call to __builtin_va_end. */
4750 static rtx
4751 expand_builtin_va_end (tree exp)
4753 tree valist = CALL_EXPR_ARG (exp, 0);
4755 /* Evaluate for side effects, if needed. I hate macros that don't
4756 do that. */
4757 if (TREE_SIDE_EFFECTS (valist))
4758 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4760 return const0_rtx;
4763 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4764 builtin rather than just as an assignment in stdarg.h because of the
4765 nastiness of array-type va_list types. */
4767 static rtx
4768 expand_builtin_va_copy (tree exp)
4770 tree dst, src, t;
4771 location_t loc = EXPR_LOCATION (exp);
4773 dst = CALL_EXPR_ARG (exp, 0);
4774 src = CALL_EXPR_ARG (exp, 1);
4776 dst = stabilize_va_list_loc (loc, dst, 1);
4777 src = stabilize_va_list_loc (loc, src, 0);
4779 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4781 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4783 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4784 TREE_SIDE_EFFECTS (t) = 1;
4785 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4787 else
4789 rtx dstb, srcb, size;
4791 /* Evaluate to pointers. */
4792 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4793 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4794 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4795 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4797 dstb = convert_memory_address (Pmode, dstb);
4798 srcb = convert_memory_address (Pmode, srcb);
4800 /* "Dereference" to BLKmode memories. */
4801 dstb = gen_rtx_MEM (BLKmode, dstb);
4802 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4803 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4804 srcb = gen_rtx_MEM (BLKmode, srcb);
4805 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4806 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4808 /* Copy. */
4809 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4812 return const0_rtx;
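/* Editor's note (illustrative): on targets whose ABI va_list is an array
   type (e.g. the x86-64 psABI), the block-move branch above makes

     va_copy (dst, src);

   copy the underlying va_list record wholesale, whereas on targets with
   a plain pointer va_list the MODIFY_EXPR branch reduces it to the
   simple assignment dst = src.  */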
4815 /* Expand a call to one of the builtin functions __builtin_frame_address or
4816 __builtin_return_address. */
4818 static rtx
4819 expand_builtin_frame_address (tree fndecl, tree exp)
4821 /* The argument must be a nonnegative integer constant.
4822 It counts the number of frames to scan up the stack.
4823 The value is either the frame pointer value or the return
4824 address saved in that frame. */
4825 if (call_expr_nargs (exp) == 0)
4826 /* Warning about missing arg was already issued. */
4827 return const0_rtx;
4828 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4830 error ("invalid argument to %qD", fndecl);
4831 return const0_rtx;
4833 else
4835 /* Number of frames to scan up the stack. */
4836 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4838 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4840 /* Some ports cannot access arbitrary stack frames. */
4841 if (tem == NULL)
4843 warning (0, "unsupported argument to %qD", fndecl);
4844 return const0_rtx;
4847 if (count)
4849 /* Warn since no effort is made to ensure that any frame
4850 beyond the current one exists or can be safely reached. */
4851 warning (OPT_Wframe_address, "calling %qD with "
4852 "a nonzero argument is unsafe", fndecl);
4855 /* For __builtin_frame_address, return what we've got. */
4856 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4857 return tem;
4859 if (!REG_P (tem)
4860 && ! CONSTANT_P (tem))
4861 tem = copy_addr_to_reg (tem);
4862 return tem;
4866 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4867 failed and the caller should emit a normal call. */
4869 static rtx
4870 expand_builtin_alloca (tree exp)
4872 rtx op0;
4873 rtx result;
4874 unsigned int align;
4875 tree fndecl = get_callee_fndecl (exp);
4876 HOST_WIDE_INT max_size;
4877 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4878 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4879 bool valid_arglist
4880 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4881 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4882 VOID_TYPE)
4883 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4884 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4885 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4887 if (!valid_arglist)
4888 return NULL_RTX;
4890 /* Compute the argument. */
4891 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4893 /* Compute the alignment. */
4894 align = (fcode == BUILT_IN_ALLOCA
4895 ? BIGGEST_ALIGNMENT
4896 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4898 /* Compute the maximum size. */
4899 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4900 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4901 : -1);
4903 /* Allocate the desired space. If the allocation stems from the declaration
4904 of a variable-sized object, it cannot accumulate. */
4905 result
4906 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4907 result = convert_memory_address (ptr_mode, result);
4909 /* Dynamic allocations for variables are recorded during gimplification. */
4910 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
4911 record_dynamic_alloc (exp);
4913 return result;
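/* Editor's note (illustrative): the plain form corresponds to

     void *p = __builtin_alloca (n);

   while the _with_align and _with_align_and_max variants are normally
   emitted by the compiler itself (e.g. for variable-length arrays);
   their extra arguments are the requested alignment in bits and, for
   the latter, a compile-time upper bound on the allocation size.  */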
4916 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
4917 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
4918 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment for
4919 handle_builtin_stack_restore. */
4921 static rtx
4922 expand_asan_emit_allocas_unpoison (tree exp)
4924 tree arg0 = CALL_EXPR_ARG (exp, 0);
4925 tree arg1 = CALL_EXPR_ARG (exp, 1);
4926 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4927 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4928 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
4929 stack_pointer_rtx, NULL_RTX, 0,
4930 OPTAB_LIB_WIDEN);
4931 off = convert_modes (ptr_mode, Pmode, off, 0);
4932 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
4933 OPTAB_LIB_WIDEN);
4934 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4935 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
4936 top, ptr_mode, bot, ptr_mode);
4937 return ret;
4940 /* Expand a call to bswap builtin in EXP.
4941 Return NULL_RTX if a normal call should be emitted rather than expanding the
4942 function in-line. If convenient, the result should be placed in TARGET.
4943 SUBTARGET may be used as the target for computing one of EXP's operands. */
4945 static rtx
4946 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4947 rtx subtarget)
4949 tree arg;
4950 rtx op0;
4952 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4953 return NULL_RTX;
4955 arg = CALL_EXPR_ARG (exp, 0);
4956 op0 = expand_expr (arg,
4957 subtarget && GET_MODE (subtarget) == target_mode
4958 ? subtarget : NULL_RTX,
4959 target_mode, EXPAND_NORMAL);
4960 if (GET_MODE (op0) != target_mode)
4961 op0 = convert_to_mode (target_mode, op0, 1);
4963 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4965 gcc_assert (target);
4967 return convert_to_mode (target_mode, target, 1);
4970 /* Expand a call to a unary builtin in EXP.
4971 Return NULL_RTX if a normal call should be emitted rather than expanding the
4972 function in-line. If convenient, the result should be placed in TARGET.
4973 SUBTARGET may be used as the target for computing one of EXP's operands. */
4975 static rtx
4976 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4977 rtx subtarget, optab op_optab)
4979 rtx op0;
4981 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4982 return NULL_RTX;
4984 /* Compute the argument. */
4985 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4986 (subtarget
4987 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4988 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4989 VOIDmode, EXPAND_NORMAL);
4990 /* Compute op, into TARGET if possible.
4991 Set TARGET to wherever the result comes back. */
4992 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4993 op_optab, op0, target, op_optab != clrsb_optab);
4994 gcc_assert (target);
4996 return convert_to_mode (target_mode, target, 0);
4999 /* Expand a call to __builtin_expect. We just return our argument
5000 as the builtin_expect semantics should already have been handled by
5001 the tree branch prediction pass. */
5003 static rtx
5004 expand_builtin_expect (tree exp, rtx target)
5006 tree arg;
5008 if (call_expr_nargs (exp) < 2)
5009 return const0_rtx;
5010 arg = CALL_EXPR_ARG (exp, 0);
5012 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5013 /* When guessing was done, the hints should be already stripped away. */
5014 gcc_assert (!flag_guess_branch_prob
5015 || optimize == 0 || seen_error ());
5016 return target;
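/* Editor's note (illustrative): by this point a use such as

     if (__builtin_expect (x == 0, 0)) ...

   has already had its hint consumed by the tree-level branch predictor,
   so the expansion above merely evaluates and returns the first
   argument (here x == 0).  */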
5019 /* Expand a call to __builtin_expect_with_probability. We just return our
5020 argument as the builtin_expect semantics should already have been handled by
5021 the tree branch prediction pass. */
5023 static rtx
5024 expand_builtin_expect_with_probability (tree exp, rtx target)
5026 tree arg;
5028 if (call_expr_nargs (exp) < 3)
5029 return const0_rtx;
5030 arg = CALL_EXPR_ARG (exp, 0);
5032 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5033 /* When guessing was done, the hints should be already stripped away. */
5034 gcc_assert (!flag_guess_branch_prob
5035 || optimize == 0 || seen_error ());
5036 return target;
5040 /* Expand a call to __builtin_assume_aligned. We just return our first
5041 argument as the builtin_assume_aligned semantics should already have been
5042 handled by CCP. */
5044 static rtx
5045 expand_builtin_assume_aligned (tree exp, rtx target)
5047 if (call_expr_nargs (exp) < 2)
5048 return const0_rtx;
5049 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5050 EXPAND_NORMAL);
5051 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5052 && (call_expr_nargs (exp) < 3
5053 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5054 return target;
5057 void
5058 expand_builtin_trap (void)
5060 if (targetm.have_trap ())
5062 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5063 /* For trap insns, when not accumulating outgoing args, force a
5064 REG_ARGS_SIZE note to prevent crossjumping of calls with
5065 different arg sizes. */
5066 if (!ACCUMULATE_OUTGOING_ARGS)
5067 add_args_size_note (insn, stack_pointer_delta);
5069 else
5071 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5072 tree call_expr = build_call_expr (fn, 0);
5073 expand_call (call_expr, NULL_RTX, false);
5076 emit_barrier ();
5079 /* Expand a call to __builtin_unreachable. We do nothing except emit
5080 a barrier saying that control flow will not pass here.
5082 It is the responsibility of the program being compiled to ensure
5083 that control flow never reaches __builtin_unreachable. */
5084 static void
5085 expand_builtin_unreachable (void)
5087 emit_barrier ();
5090 /* Expand EXP, a call to fabs, fabsf or fabsl.
5091 Return NULL_RTX if a normal call should be emitted rather than expanding
5092 the function inline. If convenient, the result should be placed
5093 in TARGET. SUBTARGET may be used as the target for computing
5094 the operand. */
5096 static rtx
5097 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5099 machine_mode mode;
5100 tree arg;
5101 rtx op0;
5103 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5104 return NULL_RTX;
5106 arg = CALL_EXPR_ARG (exp, 0);
5107 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5108 mode = TYPE_MODE (TREE_TYPE (arg));
5109 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5110 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5113 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5114 Return NULL if a normal call should be emitted rather than expanding the
5115 function inline. If convenient, the result should be placed in TARGET.
5116 SUBTARGET may be used as the target for computing the operand. */
5118 static rtx
5119 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5121 rtx op0, op1;
5122 tree arg;
5124 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5125 return NULL_RTX;
5127 arg = CALL_EXPR_ARG (exp, 0);
5128 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5130 arg = CALL_EXPR_ARG (exp, 1);
5131 op1 = expand_normal (arg);
5133 return expand_copysign (op0, op1, target);
5136 /* Emit a call to __builtin___clear_cache. */
5138 void
5139 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5141 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5142 BUILTIN_ASM_NAME_PTR
5143 (BUILT_IN_CLEAR_CACHE));
5145 emit_library_call (callee,
5146 LCT_NORMAL, VOIDmode,
5147 convert_memory_address (ptr_mode, begin), ptr_mode,
5148 convert_memory_address (ptr_mode, end), ptr_mode);
5151 /* Emit a call to __builtin___clear_cache, unless the target specifies
5152 it as do-nothing. This function can be used by trampoline
5153 finalizers to duplicate the effects of expanding a call to the
5154 clear_cache builtin. */
5156 void
5157 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5159 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5160 || CONST_INT_P (begin))
5161 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5162 || CONST_INT_P (end)));
5164 if (targetm.have_clear_cache ())
5166 /* We have a "clear_cache" insn, and it will handle everything. */
5167 class expand_operand ops[2];
5169 create_address_operand (&ops[0], begin);
5170 create_address_operand (&ops[1], end);
5172 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5173 return;
5175 else
5177 #ifndef CLEAR_INSN_CACHE
5178 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5179 does nothing. There is no need to call it. Do nothing. */
5180 return;
5181 #endif /* CLEAR_INSN_CACHE */
5184 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5187 /* Expand a call to __builtin___clear_cache. */
5189 static void
5190 expand_builtin___clear_cache (tree exp)
5192 tree begin, end;
5193 rtx begin_rtx, end_rtx;
5195 /* We must not expand to a library call. If we did, any
5196 fallback library function in libgcc that might contain a call to
5197 __builtin___clear_cache() would recurse infinitely. */
5198 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5200 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5201 return;
5204 begin = CALL_EXPR_ARG (exp, 0);
5205 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5207 end = CALL_EXPR_ARG (exp, 1);
5208 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5210 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5213 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5215 static rtx
5216 round_trampoline_addr (rtx tramp)
5218 rtx temp, addend, mask;
5220 /* If we don't need too much alignment, we'll have been guaranteed
5221 proper alignment by get_trampoline_type. */
5222 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5223 return tramp;
5225 /* Round address up to desired boundary. */
5226 temp = gen_reg_rtx (Pmode);
5227 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5228 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5230 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5231 temp, 0, OPTAB_LIB_WIDEN);
5232 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5233 temp, 0, OPTAB_LIB_WIDEN);
5235 return tramp;
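/* Editor's sketch (illustrative): the two binops above implement the
   usual round-up-to-alignment formula; in C terms:

     uintptr_t
     round_up (uintptr_t tramp, uintptr_t align)  // align is a power of two
     {
       return (tramp + align - 1) & -align;
     }

   with align == TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; round_up is a
   hypothetical name used only for this note.  */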
5238 static rtx
5239 expand_builtin_init_trampoline (tree exp, bool onstack)
5241 tree t_tramp, t_func, t_chain;
5242 rtx m_tramp, r_tramp, r_chain, tmp;
5244 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5245 POINTER_TYPE, VOID_TYPE))
5246 return NULL_RTX;
5248 t_tramp = CALL_EXPR_ARG (exp, 0);
5249 t_func = CALL_EXPR_ARG (exp, 1);
5250 t_chain = CALL_EXPR_ARG (exp, 2);
5252 r_tramp = expand_normal (t_tramp);
5253 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5254 MEM_NOTRAP_P (m_tramp) = 1;
5256 /* If ONSTACK, the TRAMP argument should be the address of a field
5257 within the local function's FRAME decl. Either way, let's see if
5258 we can fill in the MEM_ATTRs for this memory. */
5259 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5260 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5262 /* Creator of a heap trampoline is responsible for making sure the
5263 address is aligned to at least STACK_BOUNDARY. Normally malloc
5264 will ensure this anyhow. */
5265 tmp = round_trampoline_addr (r_tramp);
5266 if (tmp != r_tramp)
5268 m_tramp = change_address (m_tramp, BLKmode, tmp);
5269 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5270 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5273 /* The FUNC argument should be the address of the nested function.
5274 Extract the actual function decl to pass to the hook. */
5275 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5276 t_func = TREE_OPERAND (t_func, 0);
5277 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5279 r_chain = expand_normal (t_chain);
5281 /* Generate insns to initialize the trampoline. */
5282 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5284 if (onstack)
5286 trampolines_created = 1;
5288 if (targetm.calls.custom_function_descriptors != 0)
5289 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5290 "trampoline generated for nested function %qD", t_func);
5293 return const0_rtx;
5296 static rtx
5297 expand_builtin_adjust_trampoline (tree exp)
5299 rtx tramp;
5301 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5302 return NULL_RTX;
5304 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5305 tramp = round_trampoline_addr (tramp);
5306 if (targetm.calls.trampoline_adjust_address)
5307 tramp = targetm.calls.trampoline_adjust_address (tramp);
5309 return tramp;
5312 /* Expand a call to the builtin descriptor initialization routine.
5313 A descriptor is made up of a pair of pointers: one to the static
5314 chain and one to the code entry, in this order.
5316 static rtx
5317 expand_builtin_init_descriptor (tree exp)
5319 tree t_descr, t_func, t_chain;
5320 rtx m_descr, r_descr, r_func, r_chain;
5322 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5323 VOID_TYPE))
5324 return NULL_RTX;
5326 t_descr = CALL_EXPR_ARG (exp, 0);
5327 t_func = CALL_EXPR_ARG (exp, 1);
5328 t_chain = CALL_EXPR_ARG (exp, 2);
5330 r_descr = expand_normal (t_descr);
5331 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5332 MEM_NOTRAP_P (m_descr) = 1;
5333 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5335 r_func = expand_normal (t_func);
5336 r_chain = expand_normal (t_chain);
5338 /* Generate insns to initialize the descriptor. */
5339 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5340 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5341 POINTER_SIZE / BITS_PER_UNIT), r_func);
5343 return const0_rtx;
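/* Illustrative layout of the descriptor initialized above (a sketch,
   not a type the compiler actually declares):

     struct descriptor
     {
       void *static_chain;   // ptr_mode word at offset 0
       void *code_entry;     // ptr_mode word at POINTER_SIZE / BITS_PER_UNIT
     };  */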
5346 /* Expand a call to the builtin descriptor adjustment routine. */
5348 static rtx
5349 expand_builtin_adjust_descriptor (tree exp)
5351 rtx tramp;
5353 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5354 return NULL_RTX;
5356 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5358 /* Unalign the descriptor to allow runtime identification. */
5359 tramp = plus_constant (ptr_mode, tramp,
5360 targetm.calls.custom_function_descriptors);
5362 return force_operand (tramp, NULL_RTX);
5365 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5366 function. The function first checks whether the back end provides
5367 an insn to implement signbit for the respective mode. If not, it
5368 checks whether the floating point format of the value is such that
5369 the sign bit can be extracted; if not, signbit is expanded as "ARG < 0.0".
5370 EXP is the expression that is a call to the builtin function; if
5371 convenient, the result should be placed in TARGET. */
5372 static rtx
5373 expand_builtin_signbit (tree exp, rtx target)
5375 const struct real_format *fmt;
5376 scalar_float_mode fmode;
5377 scalar_int_mode rmode, imode;
5378 tree arg;
5379 int word, bitpos;
5380 enum insn_code icode;
5381 rtx temp;
5382 location_t loc = EXPR_LOCATION (exp);
5384 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5385 return NULL_RTX;
5387 arg = CALL_EXPR_ARG (exp, 0);
5388 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5389 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5390 fmt = REAL_MODE_FORMAT (fmode);
5392 arg = builtin_save_expr (arg);
5394 /* Expand the argument, yielding an RTX expression. */
5395 temp = expand_normal (arg);
5397 /* Check if the back end provides an insn that handles signbit for the
5398 argument's mode. */
5399 icode = optab_handler (signbit_optab, fmode);
5400 if (icode != CODE_FOR_nothing)
5402 rtx_insn *last = get_last_insn ();
5403 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5404 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5405 return target;
5406 delete_insns_since (last);
5409 /* For floating point formats without a sign bit, implement signbit
5410 as "ARG < 0.0". */
5411 bitpos = fmt->signbit_ro;
5412 if (bitpos < 0)
5414 /* But we can't do this if the format supports signed zero. */
5415 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5417 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5418 build_real (TREE_TYPE (arg), dconst0));
5419 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5422 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5424 imode = int_mode_for_mode (fmode).require ();
5425 temp = gen_lowpart (imode, temp);
5427 else
5429 imode = word_mode;
5430 /* Handle targets with different FP word orders. */
5431 if (FLOAT_WORDS_BIG_ENDIAN)
5432 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5433 else
5434 word = bitpos / BITS_PER_WORD;
5435 temp = operand_subword_force (temp, word, fmode);
5436 bitpos = bitpos % BITS_PER_WORD;
5439 /* Force the intermediate word_mode (or narrower) result into a
5440 register. This avoids attempting to create paradoxical SUBREGs
5441 of floating point modes below. */
5442 temp = force_reg (imode, temp);
5444 /* If the bitpos is within the "result mode" lowpart, the operation
5445 can be implemented with a single bitwise AND. Otherwise, we need
5446 a right shift and an AND. */
5448 if (bitpos < GET_MODE_BITSIZE (rmode))
5450 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5452 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5453 temp = gen_lowpart (rmode, temp);
5454 temp = expand_binop (rmode, and_optab, temp,
5455 immed_wide_int_const (mask, rmode),
5456 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5458 else
5460 /* Perform a logical right shift to place the signbit in the least
5461 significant bit, then truncate the result to the desired mode
5462 and mask just this bit. */
5463 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5464 temp = gen_lowpart (rmode, temp);
5465 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5466 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5469 return temp;
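/* Worked example (illustrative, assuming a 64-bit word and 32-bit int):
   for IEEE double the sign bit sits at position 63, which is not below
   GET_MODE_BITSIZE of the 32-bit result mode, so the second path above
   emits roughly (int) ((bits >> 63) & 1); for IEEE float the sign bit
   at position 31 is within the lowpart, so a single AND with 0x80000000
   suffices (any nonzero value is a valid signbit result).  */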
5472 /* Expand fork or exec calls. TARGET is the desired target of the
5473 call. EXP is the call. FN is the
5474 identifier of the actual function. IGNORE is nonzero if the
5475 value is to be ignored. */
5477 static rtx
5478 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5480 tree id, decl;
5481 tree call;
5483 /* If we are not profiling, just call the function. */
5484 if (!profile_arc_flag)
5485 return NULL_RTX;
5487 /* Otherwise call the wrapper. This should be equivalent for the rest of
5488 the compiler, so the code does not diverge, and the wrapper may run the
5489 code necessary for keeping the profiling sane. */
5491 switch (DECL_FUNCTION_CODE (fn))
5493 case BUILT_IN_FORK:
5494 id = get_identifier ("__gcov_fork");
5495 break;
5497 case BUILT_IN_EXECL:
5498 id = get_identifier ("__gcov_execl");
5499 break;
5501 case BUILT_IN_EXECV:
5502 id = get_identifier ("__gcov_execv");
5503 break;
5505 case BUILT_IN_EXECLP:
5506 id = get_identifier ("__gcov_execlp");
5507 break;
5509 case BUILT_IN_EXECLE:
5510 id = get_identifier ("__gcov_execle");
5511 break;
5513 case BUILT_IN_EXECVP:
5514 id = get_identifier ("__gcov_execvp");
5515 break;
5517 case BUILT_IN_EXECVE:
5518 id = get_identifier ("__gcov_execve");
5519 break;
5521 default:
5522 gcc_unreachable ();
5525 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5526 FUNCTION_DECL, id, TREE_TYPE (fn));
5527 DECL_EXTERNAL (decl) = 1;
5528 TREE_PUBLIC (decl) = 1;
5529 DECL_ARTIFICIAL (decl) = 1;
5530 TREE_NOTHROW (decl) = 1;
5531 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5532 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5533 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5534 return expand_call (call, target, ignore);
5539 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5540 the pointer in these functions is void*, the tree optimizers may remove
5541 casts. The mode computed in expand_builtin isn't reliable either, due
5542 to __sync_bool_compare_and_swap.
5544 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5545 group of builtins. This gives us log2 of the mode size. */
5547 static inline machine_mode
5548 get_builtin_sync_mode (int fcode_diff)
5550 /* The size is not negotiable, so ask not to get BLKmode in return
5551 if the target indicates that a smaller size would be better. */
5552 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
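/* Example (illustrative): for __sync_fetch_and_add_4 the fcode is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 and the base is
   BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF is 2 and the requested
   mode has BITS_PER_UNIT << 2 == 32 bits (SImode on typical targets).  */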
5555 /* Expand the memory expression LOC and return the appropriate memory operand
5556 for the builtin_sync operations. */
5558 static rtx
5559 get_builtin_sync_mem (tree loc, machine_mode mode)
5561 rtx addr, mem;
5562 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5563 ? TREE_TYPE (TREE_TYPE (loc))
5564 : TREE_TYPE (loc));
5565 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5567 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5568 addr = convert_memory_address (addr_mode, addr);
5570 /* Note that we explicitly do not want any alias information for this
5571 memory, so that we kill all other live memories. Otherwise we don't
5572 satisfy the full barrier semantics of the intrinsic. */
5573 mem = gen_rtx_MEM (mode, addr);
5575 set_mem_addr_space (mem, addr_space);
5577 mem = validize_mem (mem);
5579 /* The alignment needs to be at least that of the mode. */
5580 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5581 get_pointer_alignment (loc)));
5582 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5583 MEM_VOLATILE_P (mem) = 1;
5585 return mem;
5588 /* Make sure an argument is in the right mode.
5589 EXP is the tree argument.
5590 MODE is the mode it should be in. */
5592 static rtx
5593 expand_expr_force_mode (tree exp, machine_mode mode)
5595 rtx val;
5596 machine_mode old_mode;
5598 if (TREE_CODE (exp) == SSA_NAME
5599 && TYPE_MODE (TREE_TYPE (exp)) != mode)
5601 /* Undo argument promotion if possible, as combine might not
5602 be able to do it later due to MEM_VOLATILE_P uses in the
5603 patterns. */
5604 gimple *g = get_gimple_for_ssa_name (exp);
5605 if (g && gimple_assign_cast_p (g))
5607 tree rhs = gimple_assign_rhs1 (g);
5608 tree_code code = gimple_assign_rhs_code (g);
5609 if (CONVERT_EXPR_CODE_P (code)
5610 && TYPE_MODE (TREE_TYPE (rhs)) == mode
5611 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5612 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5613 && (TYPE_PRECISION (TREE_TYPE (exp))
5614 > TYPE_PRECISION (TREE_TYPE (rhs))))
5615 exp = rhs;
5619 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5620 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5621 of CONST_INTs, where we know the old_mode only from the call argument. */
5623 old_mode = GET_MODE (val);
5624 if (old_mode == VOIDmode)
5625 old_mode = TYPE_MODE (TREE_TYPE (exp));
5626 val = convert_modes (mode, old_mode, val, 1);
5627 return val;
5631 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5632 EXP is the CALL_EXPR. CODE is the rtx code
5633 that corresponds to the arithmetic or logical operation from the name;
5634 an exception here is that NOT actually means NAND. TARGET is an optional
5635 place for us to store the results; AFTER is true if the builtin returns
5636 the value after the operation (the __sync_xxx_and_fetch form). */
5638 static rtx
5639 expand_builtin_sync_operation (machine_mode mode, tree exp,
5640 enum rtx_code code, bool after,
5641 rtx target)
5643 rtx val, mem;
5644 location_t loc = EXPR_LOCATION (exp);
5646 if (code == NOT && warn_sync_nand)
5648 tree fndecl = get_callee_fndecl (exp);
5649 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5651 static bool warned_f_a_n, warned_n_a_f;
5653 switch (fcode)
5655 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5656 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5657 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5658 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5659 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5660 if (warned_f_a_n)
5661 break;
5663 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5664 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5665 warned_f_a_n = true;
5666 break;
5668 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5669 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5670 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5671 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5672 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5673 if (warned_n_a_f)
5674 break;
5676 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5677 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5678 warned_n_a_f = true;
5679 break;
5681 default:
5682 gcc_unreachable ();
5686 /* Expand the operands. */
5687 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5688 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5690 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5691 after);
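/* Illustrative source-level equivalents of the two forms handled above
   (sketch only; OP stands for the operation selected by CODE):

     // __sync_fetch_and_OP: result is the value before the operation
     tmp = *ptr;  *ptr = tmp OP val;  return tmp;
     // __sync_OP_and_fetch: result is the value after the operation
     tmp = *ptr OP val;  *ptr = tmp;  return tmp;

   with the NAND variants storing ~(*ptr & val), per the GCC 4.4
   semantics the warning above refers to.  */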
5694 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5695 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5696 true if this is the boolean form. TARGET is a place for us to store the
5697 results; this is NOT optional if IS_BOOL is true. */
5699 static rtx
5700 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5701 bool is_bool, rtx target)
5703 rtx old_val, new_val, mem;
5704 rtx *pbool, *poval;
5706 /* Expand the operands. */
5707 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5708 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5709 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5711 pbool = poval = NULL;
5712 if (target != const0_rtx)
5714 if (is_bool)
5715 pbool = &target;
5716 else
5717 poval = &target;
5719 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5720 false, MEMMODEL_SYNC_SEQ_CST,
5721 MEMMODEL_SYNC_SEQ_CST))
5722 return NULL_RTX;
5724 return target;
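/* Illustrative mapping of the two user-visible forms handled above
   (sketch only):

     // IS_BOOL true: TARGET receives the success flag
     ok  = __sync_bool_compare_and_swap (ptr, oldval, newval);
     // IS_BOOL false: TARGET receives the prior contents of *ptr
     old = __sync_val_compare_and_swap (ptr, oldval, newval);  */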
5727 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5728 general form is actually an atomic exchange, and some targets only
5729 support a reduced form with the second argument being a constant 1.
5730 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5731 the results. */
5733 static rtx
5734 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5735 rtx target)
5737 rtx val, mem;
5739 /* Expand the operands. */
5740 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5741 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5743 return expand_sync_lock_test_and_set (target, mem, val);
5746 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5748 static void
5749 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5751 rtx mem;
5753 /* Expand the operands. */
5754 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5756 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5759 /* Given an integer representing an ``enum memmodel'', verify its
5760 correctness and return the memory model enum. */
5762 static enum memmodel
5763 get_memmodel (tree exp)
5765 rtx op;
5766 unsigned HOST_WIDE_INT val;
5767 location_t loc
5768 = expansion_point_location_if_in_system_header (input_location);
5770 /* If the parameter is not a constant, it's a run time value so we'll just
5771 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5772 if (TREE_CODE (exp) != INTEGER_CST)
5773 return MEMMODEL_SEQ_CST;
5775 op = expand_normal (exp);
5777 val = INTVAL (op);
5778 if (targetm.memmodel_check)
5779 val = targetm.memmodel_check (val);
5780 else if (val & ~MEMMODEL_MASK)
5782 warning_at (loc, OPT_Winvalid_memory_model,
5783 "unknown architecture specifier in memory model to builtin");
5784 return MEMMODEL_SEQ_CST;
5787 /* Should never see a user-supplied explicit SYNC memory model, so >= LAST works. */
5788 if (memmodel_base (val) >= MEMMODEL_LAST)
5790 warning_at (loc, OPT_Winvalid_memory_model,
5791 "invalid memory model argument to builtin");
5792 return MEMMODEL_SEQ_CST;
5795 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5796 be conservative and promote consume to acquire. */
5797 if (val == MEMMODEL_CONSUME)
5798 val = MEMMODEL_ACQUIRE;
5800 return (enum memmodel) val;
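/* Example (illustrative): a call such as
     __atomic_load_n (p, __ATOMIC_CONSUME);
   reaches this point with an INTEGER_CST equal to MEMMODEL_CONSUME and,
   per the Bugzilla 59448 workaround above, is treated as
   MEMMODEL_ACQUIRE; a non-constant model argument is conservatively
   treated as MEMMODEL_SEQ_CST.  */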
5803 /* Expand the __atomic_exchange intrinsic:
5804 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5805 EXP is the CALL_EXPR.
5806 TARGET is an optional place for us to store the results. */
5808 static rtx
5809 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5811 rtx val, mem;
5812 enum memmodel model;
5814 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5816 if (!flag_inline_atomics)
5817 return NULL_RTX;
5819 /* Expand the operands. */
5820 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5821 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5823 return expand_atomic_exchange (target, mem, val, model);
5826 /* Expand the __atomic_compare_exchange intrinsic:
5827 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5828 TYPE desired, BOOL weak,
5829 enum memmodel success,
5830 enum memmodel failure)
5831 EXP is the CALL_EXPR.
5832 TARGET is an optional place for us to store the results. */
5834 static rtx
5835 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5836 rtx target)
5838 rtx expect, desired, mem, oldval;
5839 rtx_code_label *label;
5840 enum memmodel success, failure;
5841 tree weak;
5842 bool is_weak;
5843 location_t loc
5844 = expansion_point_location_if_in_system_header (input_location);
5846 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5847 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5849 if (failure > success)
5851 warning_at (loc, OPT_Winvalid_memory_model,
5852 "failure memory model cannot be stronger than success "
5853 "memory model for %<__atomic_compare_exchange%>");
5854 success = MEMMODEL_SEQ_CST;
5857 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5859 warning_at (loc, OPT_Winvalid_memory_model,
5860 "invalid failure memory model for "
5861 "%<__atomic_compare_exchange%>");
5862 failure = MEMMODEL_SEQ_CST;
5863 success = MEMMODEL_SEQ_CST;
5867 if (!flag_inline_atomics)
5868 return NULL_RTX;
5870 /* Expand the operands. */
5871 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5873 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5874 expect = convert_memory_address (Pmode, expect);
5875 expect = gen_rtx_MEM (mode, expect);
5876 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5878 weak = CALL_EXPR_ARG (exp, 3);
5879 is_weak = false;
5880 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5881 is_weak = true;
5883 if (target == const0_rtx)
5884 target = NULL;
5886 /* Lest the rtl backend create a race condition with an improper store
5887 to memory, always create a new pseudo for OLDVAL. */
5888 oldval = NULL;
5890 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5891 is_weak, success, failure))
5892 return NULL_RTX;
5894 /* Conditionally store back to EXPECT, lest we create a race condition
5895 with an improper store to memory. */
5896 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5897 the normal case where EXPECT is totally private, i.e. a register. At
5898 which point the store can be unconditional. */
5899 label = gen_label_rtx ();
5900 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5901 GET_MODE (target), 1, label);
5902 emit_move_insn (expect, oldval);
5903 emit_label (label);
5905 return target;
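/* Roughly, the sequence emitted above behaves like the following sketch
   (illustrative only, memory ordering omitted):

     ok = compare-and-swap (*object: expected *expect, desired DESIRED);
     if (!ok)
       *expect = value observed in *object;
     return ok;

   i.e. EXPECT is written back only on failure, which is why the move of
   OLDVAL is guarded by the branch on TARGET.  */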
5908 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5909 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5910 call. The weak parameter must be dropped to match the expected parameter
5911 list and the expected argument changed from value to pointer to memory
5912 slot. */
5914 static void
5915 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5917 unsigned int z;
5918 vec<tree, va_gc> *vec;
5920 vec_alloc (vec, 5);
5921 vec->quick_push (gimple_call_arg (call, 0));
5922 tree expected = gimple_call_arg (call, 1);
5923 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5924 TREE_TYPE (expected));
5925 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5926 if (expd != x)
5927 emit_move_insn (x, expd);
5928 tree v = make_tree (TREE_TYPE (expected), x);
5929 vec->quick_push (build1 (ADDR_EXPR,
5930 build_pointer_type (TREE_TYPE (expected)), v));
5931 vec->quick_push (gimple_call_arg (call, 2));
5932 /* Skip the boolean weak parameter. */
5933 for (z = 4; z < 6; z++)
5934 vec->quick_push (gimple_call_arg (call, z));
5935 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5936 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5937 gcc_assert (bytes_log2 < 5);
5938 built_in_function fncode
5939 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5940 + bytes_log2);
5941 tree fndecl = builtin_decl_explicit (fncode);
5942 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5943 fndecl);
5944 tree exp = build_call_vec (boolean_type_node, fn, vec);
5945 tree lhs = gimple_call_lhs (call);
5946 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5947 if (lhs)
5949 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5950 if (GET_MODE (boolret) != mode)
5951 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5952 x = force_reg (mode, x);
5953 write_complex_part (target, boolret, true);
5954 write_complex_part (target, x, false);
5958 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5960 void
5961 expand_ifn_atomic_compare_exchange (gcall *call)
5963 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5964 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5965 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5966 rtx expect, desired, mem, oldval, boolret;
5967 enum memmodel success, failure;
5968 tree lhs;
5969 bool is_weak;
5970 location_t loc
5971 = expansion_point_location_if_in_system_header (gimple_location (call));
5973 success = get_memmodel (gimple_call_arg (call, 4));
5974 failure = get_memmodel (gimple_call_arg (call, 5));
5976 if (failure > success)
5978 warning_at (loc, OPT_Winvalid_memory_model,
5979 "failure memory model cannot be stronger than success "
5980 "memory model for %<__atomic_compare_exchange%>");
5981 success = MEMMODEL_SEQ_CST;
5984 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5986 warning_at (loc, OPT_Winvalid_memory_model,
5987 "invalid failure memory model for "
5988 "%<__atomic_compare_exchange%>");
5989 failure = MEMMODEL_SEQ_CST;
5990 success = MEMMODEL_SEQ_CST;
5993 if (!flag_inline_atomics)
5995 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5996 return;
5999 /* Expand the operands. */
6000 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6002 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6003 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6005 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6007 boolret = NULL;
6008 oldval = NULL;
6010 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6011 is_weak, success, failure))
6013 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6014 return;
6017 lhs = gimple_call_lhs (call);
6018 if (lhs)
6020 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6021 if (GET_MODE (boolret) != mode)
6022 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6023 write_complex_part (target, boolret, true);
6024 write_complex_part (target, oldval, false);
6028 /* Expand the __atomic_load intrinsic:
6029 TYPE __atomic_load (TYPE *object, enum memmodel)
6030 EXP is the CALL_EXPR.
6031 TARGET is an optional place for us to store the results. */
6033 static rtx
6034 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6036 rtx mem;
6037 enum memmodel model;
6039 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6040 if (is_mm_release (model) || is_mm_acq_rel (model))
6042 location_t loc
6043 = expansion_point_location_if_in_system_header (input_location);
6044 warning_at (loc, OPT_Winvalid_memory_model,
6045 "invalid memory model for %<__atomic_load%>");
6046 model = MEMMODEL_SEQ_CST;
6049 if (!flag_inline_atomics)
6050 return NULL_RTX;
6052 /* Expand the operand. */
6053 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6055 return expand_atomic_load (target, mem, model);
6059 /* Expand the __atomic_store intrinsic:
6060 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6061 EXP is the CALL_EXPR.
6062 TARGET is an optional place for us to store the results. */
6064 static rtx
6065 expand_builtin_atomic_store (machine_mode mode, tree exp)
6067 rtx mem, val;
6068 enum memmodel model;
6070 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6071 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6072 || is_mm_release (model)))
6074 location_t loc
6075 = expansion_point_location_if_in_system_header (input_location);
6076 warning_at (loc, OPT_Winvalid_memory_model,
6077 "invalid memory model for %<__atomic_store%>");
6078 model = MEMMODEL_SEQ_CST;
6081 if (!flag_inline_atomics)
6082 return NULL_RTX;
6084 /* Expand the operands. */
6085 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6086 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6088 return expand_atomic_store (mem, val, model, false);
6091 /* Expand the __atomic_fetch_XXX intrinsic:
6092 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6093 EXP is the CALL_EXPR.
6094 TARGET is an optional place for us to store the results.
6095 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6096 FETCH_AFTER is true if returning the result of the operation.
6097 FETCH_AFTER is false if returning the value before the operation.
6098 IGNORE is true if the result is not used.
6099 EXT_CALL is the correct builtin for an external call if this cannot be
6100 resolved to an instruction sequence. */
6102 static rtx
6103 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6104 enum rtx_code code, bool fetch_after,
6105 bool ignore, enum built_in_function ext_call)
6107 rtx val, mem, ret;
6108 enum memmodel model;
6109 tree fndecl;
6110 tree addr;
6112 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6114 /* Expand the operands. */
6115 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6116 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6118 /* Only try generating instructions if inlining is turned on. */
6119 if (flag_inline_atomics)
6121 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6122 if (ret)
6123 return ret;
6126 /* Return if a different routine isn't needed for the library call. */
6127 if (ext_call == BUILT_IN_NONE)
6128 return NULL_RTX;
6130 /* Change the call to the specified function. */
6131 fndecl = get_callee_fndecl (exp);
6132 addr = CALL_EXPR_FN (exp);
6133 STRIP_NOPS (addr);
6135 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6136 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6138 /* If we will emit code after the call, the call cannot be a tail call.
6139 If it is emitted as a tail call, a barrier is emitted after it, and
6140 then all trailing code is removed. */
6141 if (!ignore)
6142 CALL_EXPR_TAILCALL (exp) = 0;
6144 /* Expand the call here so we can emit trailing code. */
6145 ret = expand_call (exp, target, ignore);
6147 /* Replace the original function just in case it matters. */
6148 TREE_OPERAND (addr, 0) = fndecl;
6150 /* Then issue the arithmetic correction to return the right result. */
6151 if (!ignore)
6153 if (code == NOT)
6155 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6156 OPTAB_LIB_WIDEN);
6157 ret = expand_simple_unop (mode, NOT, ret, target, true);
6159 else
6160 ret = expand_simple_binop (mode, code, ret, val, target, true,
6161 OPTAB_LIB_WIDEN);
6163 return ret;
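/* Example of the correction above (illustrative): the library routine
   __atomic_fetch_add_N returns the value before the addition, so when
   __atomic_add_fetch falls back to that external call the result is
   fixed up as ret = ret + val; for the NAND forms the fix-up is
   ret = ~(ret & val), matching the NOT-means-NAND convention used
   here.  */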
6166 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6168 void
6169 expand_ifn_atomic_bit_test_and (gcall *call)
6171 tree ptr = gimple_call_arg (call, 0);
6172 tree bit = gimple_call_arg (call, 1);
6173 tree flag = gimple_call_arg (call, 2);
6174 tree lhs = gimple_call_lhs (call);
6175 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6176 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6177 enum rtx_code code;
6178 optab optab;
6179 class expand_operand ops[5];
6181 gcc_assert (flag_inline_atomics);
6183 if (gimple_call_num_args (call) == 4)
6184 model = get_memmodel (gimple_call_arg (call, 3));
6186 rtx mem = get_builtin_sync_mem (ptr, mode);
6187 rtx val = expand_expr_force_mode (bit, mode);
6189 switch (gimple_call_internal_fn (call))
6191 case IFN_ATOMIC_BIT_TEST_AND_SET:
6192 code = IOR;
6193 optab = atomic_bit_test_and_set_optab;
6194 break;
6195 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6196 code = XOR;
6197 optab = atomic_bit_test_and_complement_optab;
6198 break;
6199 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6200 code = AND;
6201 optab = atomic_bit_test_and_reset_optab;
6202 break;
6203 default:
6204 gcc_unreachable ();
6207 if (lhs == NULL_TREE)
6209 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6210 val, NULL_RTX, true, OPTAB_DIRECT);
6211 if (code == AND)
6212 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6213 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6214 return;
6217 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6218 enum insn_code icode = direct_optab_handler (optab, mode);
6219 gcc_assert (icode != CODE_FOR_nothing);
6220 create_output_operand (&ops[0], target, mode);
6221 create_fixed_operand (&ops[1], mem);
6222 create_convert_operand_to (&ops[2], val, mode, true);
6223 create_integer_operand (&ops[3], model);
6224 create_integer_operand (&ops[4], integer_onep (flag));
6225 if (maybe_expand_insn (icode, 5, ops))
6226 return;
6228 rtx bitval = val;
6229 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6230 val, NULL_RTX, true, OPTAB_DIRECT);
6231 rtx maskval = val;
6232 if (code == AND)
6233 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6234 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6235 code, model, false);
6236 if (integer_onep (flag))
6238 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6239 NULL_RTX, true, OPTAB_DIRECT);
6240 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6241 true, OPTAB_DIRECT);
6243 else
6244 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6245 OPTAB_DIRECT);
6246 if (result != target)
6247 emit_move_insn (target, result);
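/* Illustrative source-level equivalent of the fallback path above for
   IFN_ATOMIC_BIT_TEST_AND_SET when the result is used (sketch only):

     mask = 1 << bit;
     old  = __atomic_fetch_or (ptr, mask, model);
     lhs  = flag ? (old >> bit) & 1 : old & mask;

   the complement and reset variants use XOR and AND with ~mask in the
   same way.  */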
6250 /* Expand an atomic clear operation.
6251 void _atomic_clear (BOOL *obj, enum memmodel)
6252 EXP is the call expression. */
6254 static rtx
6255 expand_builtin_atomic_clear (tree exp)
6257 machine_mode mode;
6258 rtx mem, ret;
6259 enum memmodel model;
6261 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6262 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6263 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6265 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6267 location_t loc
6268 = expansion_point_location_if_in_system_header (input_location);
6269 warning_at (loc, OPT_Winvalid_memory_model,
6270 "invalid memory model for %<__atomic_store%>");
6271 model = MEMMODEL_SEQ_CST;
6274 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6275 Failing that, a store is issued by __atomic_store. The only way this can
6276 fail is if the bool type is larger than a word size. Unlikely, but
6277 handle it anyway for completeness. Assume a single threaded model since
6278 there is no atomic support in this case, and no barriers are required. */
6279 ret = expand_atomic_store (mem, const0_rtx, model, true);
6280 if (!ret)
6281 emit_move_insn (mem, const0_rtx);
6282 return const0_rtx;
6285 /* Expand an atomic test_and_set operation.
6286 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6287 EXP is the call expression. */
6289 static rtx
6290 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6292 rtx mem;
6293 enum memmodel model;
6294 machine_mode mode;
6296 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6297 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6298 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6300 return expand_atomic_test_and_set (target, mem, model);
6304 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6305 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6307 static tree
6308 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6310 int size;
6311 machine_mode mode;
6312 unsigned int mode_align, type_align;
6314 if (TREE_CODE (arg0) != INTEGER_CST)
6315 return NULL_TREE;
6317 /* We need a corresponding integer mode for the access to be lock-free. */
6318 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6319 if (!int_mode_for_size (size, 0).exists (&mode))
6320 return boolean_false_node;
6322 mode_align = GET_MODE_ALIGNMENT (mode);
6324 if (TREE_CODE (arg1) == INTEGER_CST)
6326 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6328 /* Either this argument is null, or it's a fake pointer encoding
6329 the alignment of the object. */
6330 val = least_bit_hwi (val);
6331 val *= BITS_PER_UNIT;
6333 if (val == 0 || mode_align < val)
6334 type_align = mode_align;
6335 else
6336 type_align = val;
6338 else
6340 tree ttype = TREE_TYPE (arg1);
6342 /* This function is usually invoked and folded immediately by the front
6343 end before anything else has a chance to look at it. The pointer
6344 parameter at this point is usually cast to a void *, so check for that
6345 and look past the cast. */
6346 if (CONVERT_EXPR_P (arg1)
6347 && POINTER_TYPE_P (ttype)
6348 && VOID_TYPE_P (TREE_TYPE (ttype))
6349 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6350 arg1 = TREE_OPERAND (arg1, 0);
6352 ttype = TREE_TYPE (arg1);
6353 gcc_assert (POINTER_TYPE_P (ttype));
6355 /* Get the underlying type of the object. */
6356 ttype = TREE_TYPE (ttype);
6357 type_align = TYPE_ALIGN (ttype);
6360 /* If the object has smaller alignment, the lock free routines cannot
6361 be used. */
6362 if (type_align < mode_align)
6363 return boolean_false_node;
6365 /* Check if a compare_and_swap pattern exists for the mode which represents
6366 the required size. The pattern is not allowed to fail, so the existence
6367 of the pattern indicates support is present. Also require that an
6368 atomic load exists for the required size. */
6369 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6370 return boolean_true_node;
6371 else
6372 return boolean_false_node;
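/* Example (illustrative): __atomic_always_lock_free (4, 0) asks about a
   naturally aligned 4-byte object; with a null second argument the mode
   alignment is assumed, so this folds to true whenever a 32-bit
   compare-and-swap and atomic load exist.  Passing a pointer whose
   pointed-to type is less aligned than the mode makes the fold return
   false.  */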
6375 /* Return true if the parameters to call EXP represent an object which will
6376 always generate lock free instructions. The first argument represents the
6377 size of the object, and the second parameter is a pointer to the object
6378 itself. If NULL is passed for the object, then the result is based on
6379 typical alignment for an object of the specified size. Otherwise return
6380 false. */
6382 static rtx
6383 expand_builtin_atomic_always_lock_free (tree exp)
6385 tree size;
6386 tree arg0 = CALL_EXPR_ARG (exp, 0);
6387 tree arg1 = CALL_EXPR_ARG (exp, 1);
6389 if (TREE_CODE (arg0) != INTEGER_CST)
6391 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6392 return const0_rtx;
6395 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6396 if (size == boolean_true_node)
6397 return const1_rtx;
6398 return const0_rtx;
6401 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6402 is lock free on this architecture. */
6404 static tree
6405 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6407 if (!flag_inline_atomics)
6408 return NULL_TREE;
6410 /* If it isn't always lock free, don't generate a result. */
6411 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6412 return boolean_true_node;
6414 return NULL_TREE;
6417 /* Return true if the parameters to call EXP represent an object which will
6418 always generate lock free instructions. The first argument represents the
6419 size of the object, and the second parameter is a pointer to the object
6420 itself. If NULL is passed for the object, then the result is based on
6421 typical alignment for an object of the specified size. Otherwise return
6422 NULL. */
6424 static rtx
6425 expand_builtin_atomic_is_lock_free (tree exp)
6427 tree size;
6428 tree arg0 = CALL_EXPR_ARG (exp, 0);
6429 tree arg1 = CALL_EXPR_ARG (exp, 1);
6431 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6433 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6434 return NULL_RTX;
6437 if (!flag_inline_atomics)
6438 return NULL_RTX;
6440 /* If the value is known at compile time, return the RTX for it. */
6441 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6442 if (size == boolean_true_node)
6443 return const1_rtx;
6445 return NULL_RTX;
6448 /* Expand the __atomic_thread_fence intrinsic:
6449 void __atomic_thread_fence (enum memmodel)
6450 EXP is the CALL_EXPR. */
6452 static void
6453 expand_builtin_atomic_thread_fence (tree exp)
6455 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6456 expand_mem_thread_fence (model);
6459 /* Expand the __atomic_signal_fence intrinsic:
6460 void __atomic_signal_fence (enum memmodel)
6461 EXP is the CALL_EXPR. */
6463 static void
6464 expand_builtin_atomic_signal_fence (tree exp)
6466 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6467 expand_mem_signal_fence (model);
6470 /* Expand the __sync_synchronize intrinsic. */
6472 static void
6473 expand_builtin_sync_synchronize (void)
6475 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6478 static rtx
6479 expand_builtin_thread_pointer (tree exp, rtx target)
6481 enum insn_code icode;
6482 if (!validate_arglist (exp, VOID_TYPE))
6483 return const0_rtx;
6484 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6485 if (icode != CODE_FOR_nothing)
6487 class expand_operand op;
6488 /* If the target is not suitable then create a new target. */
6489 if (target == NULL_RTX
6490 || !REG_P (target)
6491 || GET_MODE (target) != Pmode)
6492 target = gen_reg_rtx (Pmode);
6493 create_output_operand (&op, target, Pmode);
6494 expand_insn (icode, 1, &op);
6495 return target;
6497 error ("%<__builtin_thread_pointer%> is not supported on this target");
6498 return const0_rtx;
6501 static void
6502 expand_builtin_set_thread_pointer (tree exp)
6504 enum insn_code icode;
6505 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6506 return;
6507 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6508 if (icode != CODE_FOR_nothing)
6510 class expand_operand op;
6511 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6512 Pmode, EXPAND_NORMAL);
6513 create_input_operand (&op, val, Pmode);
6514 expand_insn (icode, 1, &op);
6515 return;
6517 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6521 /* Emit code to restore the current value of stack. */
6523 static void
6524 expand_stack_restore (tree var)
6526 rtx_insn *prev;
6527 rtx sa = expand_normal (var);
6529 sa = convert_memory_address (Pmode, sa);
6531 prev = get_last_insn ();
6532 emit_stack_restore (SAVE_BLOCK, sa);
6534 record_new_stack_level ();
6536 fixup_args_size_notes (prev, get_last_insn (), 0);
6539 /* Emit code to save the current value of stack. */
6541 static rtx
6542 expand_stack_save (void)
6544 rtx ret = NULL_RTX;
6546 emit_stack_save (SAVE_BLOCK, &ret);
6547 return ret;
6550 /* Emit code to get the openacc gang, worker or vector id or size. */
6552 static rtx
6553 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6555 const char *name;
6556 rtx fallback_retval;
6557 rtx_insn *(*gen_fn) (rtx, rtx);
6558 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6560 case BUILT_IN_GOACC_PARLEVEL_ID:
6561 name = "__builtin_goacc_parlevel_id";
6562 fallback_retval = const0_rtx;
6563 gen_fn = targetm.gen_oacc_dim_pos;
6564 break;
6565 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6566 name = "__builtin_goacc_parlevel_size";
6567 fallback_retval = const1_rtx;
6568 gen_fn = targetm.gen_oacc_dim_size;
6569 break;
6570 default:
6571 gcc_unreachable ();
6574 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6576 error ("%qs only supported in OpenACC code", name);
6577 return const0_rtx;
6580 tree arg = CALL_EXPR_ARG (exp, 0);
6581 if (TREE_CODE (arg) != INTEGER_CST)
6583 error ("non-constant argument 0 to %qs", name);
6584 return const0_rtx;
6587 int dim = TREE_INT_CST_LOW (arg);
6588 switch (dim)
6590 case GOMP_DIM_GANG:
6591 case GOMP_DIM_WORKER:
6592 case GOMP_DIM_VECTOR:
6593 break;
6594 default:
6595 error ("illegal argument 0 to %qs", name);
6596 return const0_rtx;
6599 if (ignore)
6600 return target;
6602 if (target == NULL_RTX)
6603 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6605 if (!targetm.have_oacc_dim_size ())
6607 emit_move_insn (target, fallback_retval);
6608 return target;
6611 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6612 emit_insn (gen_fn (reg, GEN_INT (dim)));
6613 if (reg != target)
6614 emit_move_insn (target, reg);
6616 return target;
6619 /* Expand a string compare operation using a sequence of char comparisons
6620 to get rid of the calling overhead, with result going to TARGET if
6621 that's convenient.
6623 VAR_STR is the variable string source;
6624 CONST_STR is the constant string source;
6625 LENGTH is the number of chars to compare;
6626 CONST_STR_N indicates which source string is the constant string;
6627 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6629 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
6631 target = (int) (unsigned char) var_str[0]
6632 - (int) (unsigned char) const_str[0];
6633 if (target != 0)
6634 goto ne_label;
6636 target = (int) (unsigned char) var_str[length - 2]
6637 - (int) (unsigned char) const_str[length - 2];
6638 if (target != 0)
6639 goto ne_label;
6640 target = (int) (unsigned char) var_str[length - 1]
6641 - (int) (unsigned char) const_str[length - 1];
6642 ne_label:
6645 static rtx
6646 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6647 unsigned HOST_WIDE_INT length,
6648 int const_str_n, machine_mode mode)
6650 HOST_WIDE_INT offset = 0;
6651 rtx var_rtx_array
6652 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6653 rtx var_rtx = NULL_RTX;
6654 rtx const_rtx = NULL_RTX;
6655 rtx result = target ? target : gen_reg_rtx (mode);
6656 rtx_code_label *ne_label = gen_label_rtx ();
6657 tree unit_type_node = unsigned_char_type_node;
6658 scalar_int_mode unit_mode
6659 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6661 start_sequence ();
6663 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6665 var_rtx
6666 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6667 const_rtx = c_readstr (const_str + offset, unit_mode);
6668 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6669 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6671 op0 = convert_modes (mode, unit_mode, op0, 1);
6672 op1 = convert_modes (mode, unit_mode, op1, 1);
6673 result = expand_simple_binop (mode, MINUS, op0, op1,
6674 result, 1, OPTAB_WIDEN);
6675 if (i < length - 1)
6676 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6677 mode, true, ne_label);
6678 offset += GET_MODE_SIZE (unit_mode);
6681 emit_label (ne_label);
6682 rtx_insn *insns = get_insns ();
6683 end_sequence ();
6684 emit_insn (insns);
6686 return result;
6689 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
6690 to TARGET if that's convenient.
6691 If the call has not been inlined, return NULL_RTX. */
6693 static rtx
6694 inline_expand_builtin_bytecmp (tree exp, rtx target)
6696 tree fndecl = get_callee_fndecl (exp);
6697 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6698 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6700 /* Do NOT apply this inlining expansion when optimizing for size or
6701 optimization level below 2. */
6702 if (optimize < 2 || optimize_insn_for_size_p ())
6703 return NULL_RTX;
6705 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6706 || fcode == BUILT_IN_STRNCMP
6707 || fcode == BUILT_IN_MEMCMP);
6709 /* On a target where the type of the call (int) has the same or narrower precision
6710 than unsigned char, give up the inlining expansion. */
6711 if (TYPE_PRECISION (unsigned_char_type_node)
6712 >= TYPE_PRECISION (TREE_TYPE (exp)))
6713 return NULL_RTX;
6715 tree arg1 = CALL_EXPR_ARG (exp, 0);
6716 tree arg2 = CALL_EXPR_ARG (exp, 1);
6717 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6719 unsigned HOST_WIDE_INT len1 = 0;
6720 unsigned HOST_WIDE_INT len2 = 0;
6721 unsigned HOST_WIDE_INT len3 = 0;
6723 /* Get the object representation of the initializers of ARG1 and ARG2
6724 as strings, provided they refer to constant objects, with their byte
6725 sizes in LEN1 and LEN2, respectively. */
6726 const char *bytes1 = getbyterep (arg1, &len1);
6727 const char *bytes2 = getbyterep (arg2, &len2);
6729 /* Fail if neither argument refers to an initialized constant. */
6730 if (!bytes1 && !bytes2)
6731 return NULL_RTX;
6733 if (is_ncmp)
6735 /* Fail if the memcmp/strncmp bound is not a constant. */
6736 if (!tree_fits_uhwi_p (len3_tree))
6737 return NULL_RTX;
6739 len3 = tree_to_uhwi (len3_tree);
6741 if (fcode == BUILT_IN_MEMCMP)
6743 /* Fail if the memcmp bound is greater than the size of either
6744 of the two constant objects. */
6745 if ((bytes1 && len1 < len3)
6746 || (bytes2 && len2 < len3))
6747 return NULL_RTX;
6751 if (fcode != BUILT_IN_MEMCMP)
6753 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6754 and LEN2 to the length of the nul-terminated string stored
6755 in each. */
6756 if (bytes1 != NULL)
6757 len1 = strnlen (bytes1, len1) + 1;
6758 if (bytes2 != NULL)
6759 len2 = strnlen (bytes2, len2) + 1;
6762 /* See inline_string_cmp. */
6763 int const_str_n;
6764 if (!len1)
6765 const_str_n = 2;
6766 else if (!len2)
6767 const_str_n = 1;
6768 else if (len2 > len1)
6769 const_str_n = 1;
6770 else
6771 const_str_n = 2;
6773 /* For strncmp only, compute the new bound as the smallest of
6774 the lengths of the two strings (plus 1) and the bound provided
6775 to the function. */
6776 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6777 if (is_ncmp && len3 < bound)
6778 bound = len3;
6780 /* If the bound of the comparison is larger than the threshold,
6781 do nothing. */
6782 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6783 return NULL_RTX;
6785 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6787 /* Now, start inline expansion of the call. */
6788 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6789 (const_str_n == 1) ? bytes1 : bytes2, bound,
6790 const_str_n, mode);
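/* Example (illustrative): for strncmp (s, "abc", 5) with a non-constant
   S, LEN2 becomes strnlen ("abc", 4) + 1 == 4 and LEN3 is 5, so the
   bound used is 4; the inline expansion is attempted only when that
   bound does not exceed param_builtin_string_cmp_inline_length.  */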
6793 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6794 represents the size of the first argument to that call, or VOIDmode
6795 if the argument is a pointer. IGNORE will be true if the result
6796 isn't used. */
6797 static rtx
6798 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6799 bool ignore)
6801 rtx val, failsafe;
6802 unsigned nargs = call_expr_nargs (exp);
6804 tree arg0 = CALL_EXPR_ARG (exp, 0);
6806 if (mode == VOIDmode)
6808 mode = TYPE_MODE (TREE_TYPE (arg0));
6809 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6812 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6814 /* An optional second argument can be used as a failsafe value on
6815 some machines. If it isn't present, then the failsafe value is
6816 assumed to be 0. */
6817 if (nargs > 1)
6819 tree arg1 = CALL_EXPR_ARG (exp, 1);
6820 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6822 else
6823 failsafe = const0_rtx;
6825 /* If the result isn't used, the behavior is undefined. It would be
6826 nice to emit a warning here, but path splitting means this might
6827 happen with legitimate code. So simply drop the builtin
6828 expansion in that case; we've handled any side-effects above. */
6829 if (ignore)
6830 return const0_rtx;
6832 /* If we don't have a suitable target, create one to hold the result. */
6833 if (target == NULL || GET_MODE (target) != mode)
6834 target = gen_reg_rtx (mode);
6836 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6837 val = convert_modes (mode, VOIDmode, val, false);
6839 return targetm.speculation_safe_value (mode, target, val, failsafe);
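/* Illustrative use of the builtin expanded above (sketch only; ARRAY,
   I and BOUND are placeholders):

     if (i < bound)
       val = __builtin_speculation_safe_value (array[i]);

   on targets implementing targetm.speculation_safe_value the result is
   forced to the failsafe value (0 unless a second argument is given)
   along mis-speculated paths, mitigating Spectre-v1 style leaks.  */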
6842 /* Expand an expression EXP that calls a built-in function,
6843 with result going to TARGET if that's convenient
6844 (and in mode MODE if that's convenient).
6845 SUBTARGET may be used as the target for computing one of EXP's operands.
6846 IGNORE is nonzero if the value is to be ignored. */
6848 rtx
6849 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6850 int ignore)
6852 tree fndecl = get_callee_fndecl (exp);
6853 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6854 int flags;
6856 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6857 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6859 /* When ASan is enabled, we don't want to expand some memory/string
6860 builtins and rely on libsanitizer's hooks. This allows us to avoid
6861 redundant checks and be sure that possible overflow will be detected
6862 by ASan. */
6864 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6865 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6866 return expand_call (exp, target, ignore);
6868 /* When not optimizing, generate calls to library functions for a certain
6869 set of builtins. */
6870 if (!optimize
6871 && !called_as_built_in (fndecl)
6872 && fcode != BUILT_IN_FORK
6873 && fcode != BUILT_IN_EXECL
6874 && fcode != BUILT_IN_EXECV
6875 && fcode != BUILT_IN_EXECLP
6876 && fcode != BUILT_IN_EXECLE
6877 && fcode != BUILT_IN_EXECVP
6878 && fcode != BUILT_IN_EXECVE
6879 && fcode != BUILT_IN_CLEAR_CACHE
6880 && !ALLOCA_FUNCTION_CODE_P (fcode)
6881 && fcode != BUILT_IN_FREE)
6882 return expand_call (exp, target, ignore);
6884 /* The built-in function expanders test for target == const0_rtx
6885 to determine whether the function's result will be ignored. */
6886 if (ignore)
6887 target = const0_rtx;
6889 /* If the result of a pure or const built-in function is ignored, and
6890 none of its arguments are volatile, we can avoid expanding the
6891 built-in call and just evaluate the arguments for side-effects. */
6892 if (target == const0_rtx
6893 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6894 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6896 bool volatilep = false;
6897 tree arg;
6898 call_expr_arg_iterator iter;
6900 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6901 if (TREE_THIS_VOLATILE (arg))
6903 volatilep = true;
6904 break;
6907 if (! volatilep)
6909 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6910 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6911 return const0_rtx;
6915 switch (fcode)
6917 CASE_FLT_FN (BUILT_IN_FABS):
6918 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6919 case BUILT_IN_FABSD32:
6920 case BUILT_IN_FABSD64:
6921 case BUILT_IN_FABSD128:
6922 target = expand_builtin_fabs (exp, target, subtarget);
6923 if (target)
6924 return target;
6925 break;
6927 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6928 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6929 target = expand_builtin_copysign (exp, target, subtarget);
6930 if (target)
6931 return target;
6932 break;
6934 /* Just do a normal library call if we were unable to fold
6935 the values. */
6936 CASE_FLT_FN (BUILT_IN_CABS):
6937 break;
6939 CASE_FLT_FN (BUILT_IN_FMA):
6940 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6941 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6942 if (target)
6943 return target;
6944 break;
6946 CASE_FLT_FN (BUILT_IN_ILOGB):
6947 if (! flag_unsafe_math_optimizations)
6948 break;
6949 gcc_fallthrough ();
6950 CASE_FLT_FN (BUILT_IN_ISINF):
6951 CASE_FLT_FN (BUILT_IN_FINITE):
6952 case BUILT_IN_ISFINITE:
6953 case BUILT_IN_ISNORMAL:
6954 target = expand_builtin_interclass_mathfn (exp, target);
6955 if (target)
6956 return target;
6957 break;
6959 CASE_FLT_FN (BUILT_IN_ICEIL):
6960 CASE_FLT_FN (BUILT_IN_LCEIL):
6961 CASE_FLT_FN (BUILT_IN_LLCEIL):
6962 CASE_FLT_FN (BUILT_IN_LFLOOR):
6963 CASE_FLT_FN (BUILT_IN_IFLOOR):
6964 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6965 target = expand_builtin_int_roundingfn (exp, target);
6966 if (target)
6967 return target;
6968 break;
6970 CASE_FLT_FN (BUILT_IN_IRINT):
6971 CASE_FLT_FN (BUILT_IN_LRINT):
6972 CASE_FLT_FN (BUILT_IN_LLRINT):
6973 CASE_FLT_FN (BUILT_IN_IROUND):
6974 CASE_FLT_FN (BUILT_IN_LROUND):
6975 CASE_FLT_FN (BUILT_IN_LLROUND):
6976 target = expand_builtin_int_roundingfn_2 (exp, target);
6977 if (target)
6978 return target;
6979 break;
6981 CASE_FLT_FN (BUILT_IN_POWI):
6982 target = expand_builtin_powi (exp, target);
6983 if (target)
6984 return target;
6985 break;
6987 CASE_FLT_FN (BUILT_IN_CEXPI):
6988 target = expand_builtin_cexpi (exp, target);
6989 gcc_assert (target);
6990 return target;
6992 CASE_FLT_FN (BUILT_IN_SIN):
6993 CASE_FLT_FN (BUILT_IN_COS):
6994 if (! flag_unsafe_math_optimizations)
6995 break;
6996 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6997 if (target)
6998 return target;
6999 break;
7001 CASE_FLT_FN (BUILT_IN_SINCOS):
7002 if (! flag_unsafe_math_optimizations)
7003 break;
7004 target = expand_builtin_sincos (exp);
7005 if (target)
7006 return target;
7007 break;
7009 case BUILT_IN_APPLY_ARGS:
7010 return expand_builtin_apply_args ();
7012 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7013 FUNCTION with a copy of the parameters described by
7014 ARGUMENTS, and ARGSIZE. It returns a block of memory
7015 allocated on the stack into which is stored all the registers
7016 that might possibly be used for returning the result of a
7017 function. ARGUMENTS is the value returned by
7018 __builtin_apply_args. ARGSIZE is the number of bytes of
7019 arguments that must be copied. ??? How should this value be
7020 computed? We'll also need a safe worst case value for varargs
7021 functions. */
7022 case BUILT_IN_APPLY:
7023 if (!validate_arglist (exp, POINTER_TYPE,
7024 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7025 && !validate_arglist (exp, REFERENCE_TYPE,
7026 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7027 return const0_rtx;
7028 else
7030 rtx ops[3];
7032 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7033 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7034 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7036 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7039 /* __builtin_return (RESULT) causes the function to return the
7040 value described by RESULT. RESULT is address of the block of
7041 memory returned by __builtin_apply. */
7042 case BUILT_IN_RETURN:
7043 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7044 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7045 return const0_rtx;
7047 case BUILT_IN_SAVEREGS:
7048 return expand_builtin_saveregs ();
7050 case BUILT_IN_VA_ARG_PACK:
7051 /* All valid uses of __builtin_va_arg_pack () are removed during
7052 inlining. */
7053 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7054 return const0_rtx;
7056 case BUILT_IN_VA_ARG_PACK_LEN:
7057 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7058 inlining. */
7059 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7060 return const0_rtx;
7062 /* Return the address of the first anonymous stack arg. */
7063 case BUILT_IN_NEXT_ARG:
7064 if (fold_builtin_next_arg (exp, false))
7065 return const0_rtx;
7066 return expand_builtin_next_arg ();
7068 case BUILT_IN_CLEAR_CACHE:
7069 expand_builtin___clear_cache (exp);
7070 return const0_rtx;
7072 case BUILT_IN_CLASSIFY_TYPE:
7073 return expand_builtin_classify_type (exp);
7075 case BUILT_IN_CONSTANT_P:
7076 return const0_rtx;
7078 case BUILT_IN_FRAME_ADDRESS:
7079 case BUILT_IN_RETURN_ADDRESS:
7080 return expand_builtin_frame_address (fndecl, exp);
7082 /* Returns the address of the area where the structure is returned.
7083 0 otherwise. */
7084 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7085 if (call_expr_nargs (exp) != 0
7086 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7087 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7088 return const0_rtx;
7089 else
7090 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7092 CASE_BUILT_IN_ALLOCA:
7093 target = expand_builtin_alloca (exp);
7094 if (target)
7095 return target;
7096 break;
7098 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7099 return expand_asan_emit_allocas_unpoison (exp);
7101 case BUILT_IN_STACK_SAVE:
7102 return expand_stack_save ();
7104 case BUILT_IN_STACK_RESTORE:
7105 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7106 return const0_rtx;
7108 case BUILT_IN_BSWAP16:
7109 case BUILT_IN_BSWAP32:
7110 case BUILT_IN_BSWAP64:
7111 case BUILT_IN_BSWAP128:
7112 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7113 if (target)
7114 return target;
7115 break;
7117 CASE_INT_FN (BUILT_IN_FFS):
7118 target = expand_builtin_unop (target_mode, exp, target,
7119 subtarget, ffs_optab);
7120 if (target)
7121 return target;
7122 break;
7124 CASE_INT_FN (BUILT_IN_CLZ):
7125 target = expand_builtin_unop (target_mode, exp, target,
7126 subtarget, clz_optab);
7127 if (target)
7128 return target;
7129 break;
7131 CASE_INT_FN (BUILT_IN_CTZ):
7132 target = expand_builtin_unop (target_mode, exp, target,
7133 subtarget, ctz_optab);
7134 if (target)
7135 return target;
7136 break;
7138 CASE_INT_FN (BUILT_IN_CLRSB):
7139 target = expand_builtin_unop (target_mode, exp, target,
7140 subtarget, clrsb_optab);
7141 if (target)
7142 return target;
7143 break;
7145 CASE_INT_FN (BUILT_IN_POPCOUNT):
7146 target = expand_builtin_unop (target_mode, exp, target,
7147 subtarget, popcount_optab);
7148 if (target)
7149 return target;
7150 break;
7152 CASE_INT_FN (BUILT_IN_PARITY):
7153 target = expand_builtin_unop (target_mode, exp, target,
7154 subtarget, parity_optab);
7155 if (target)
7156 return target;
7157 break;
7159 case BUILT_IN_STRLEN:
7160 target = expand_builtin_strlen (exp, target, target_mode);
7161 if (target)
7162 return target;
7163 break;
7165 case BUILT_IN_STRNLEN:
7166 target = expand_builtin_strnlen (exp, target, target_mode);
7167 if (target)
7168 return target;
7169 break;
7171 case BUILT_IN_STRCPY:
7172 target = expand_builtin_strcpy (exp, target);
7173 if (target)
7174 return target;
7175 break;
7177 case BUILT_IN_STRNCPY:
7178 target = expand_builtin_strncpy (exp, target);
7179 if (target)
7180 return target;
7181 break;
7183 case BUILT_IN_STPCPY:
7184 target = expand_builtin_stpcpy (exp, target, mode);
7185 if (target)
7186 return target;
7187 break;
7189 case BUILT_IN_MEMCPY:
7190 target = expand_builtin_memcpy (exp, target);
7191 if (target)
7192 return target;
7193 break;
7195 case BUILT_IN_MEMMOVE:
7196 target = expand_builtin_memmove (exp, target);
7197 if (target)
7198 return target;
7199 break;
7201 case BUILT_IN_MEMPCPY:
7202 target = expand_builtin_mempcpy (exp, target);
7203 if (target)
7204 return target;
7205 break;
7207 case BUILT_IN_MEMSET:
7208 target = expand_builtin_memset (exp, target, mode);
7209 if (target)
7210 return target;
7211 break;
7213 case BUILT_IN_BZERO:
7214 target = expand_builtin_bzero (exp);
7215 if (target)
7216 return target;
7217 break;
7219 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7220 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7221 when changing it to a strcmp call. */
7222 case BUILT_IN_STRCMP_EQ:
7223 target = expand_builtin_memcmp (exp, target, true);
7224 if (target)
7225 return target;
7227 /* Change this call back to a BUILT_IN_STRCMP. */
7228 TREE_OPERAND (exp, 1)
7229 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7231 /* Delete the last parameter. */
7232 unsigned int i;
7233 vec<tree, va_gc> *arg_vec;
7234 vec_alloc (arg_vec, 2);
7235 for (i = 0; i < 2; i++)
7236 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7237 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7238 /* FALLTHROUGH */
7240 case BUILT_IN_STRCMP:
7241 target = expand_builtin_strcmp (exp, target);
7242 if (target)
7243 return target;
7244 break;
7246 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7247 back to a BUILT_IN_STRNCMP. */
7248 case BUILT_IN_STRNCMP_EQ:
7249 target = expand_builtin_memcmp (exp, target, true);
7250 if (target)
7251 return target;
7253 /* Change it back to a BUILT_IN_STRNCMP. */
7254 TREE_OPERAND (exp, 1)
7255 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7256 /* FALLTHROUGH */
7258 case BUILT_IN_STRNCMP:
7259 target = expand_builtin_strncmp (exp, target, mode);
7260 if (target)
7261 return target;
7262 break;
7264 case BUILT_IN_BCMP:
7265 case BUILT_IN_MEMCMP:
7266 case BUILT_IN_MEMCMP_EQ:
7267 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7268 if (target)
7269 return target;
7270 if (fcode == BUILT_IN_MEMCMP_EQ)
7272 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7273 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7275 break;
7277 case BUILT_IN_SETJMP:
7278 /* This should have been lowered to the builtins below. */
7279 gcc_unreachable ();
7281 case BUILT_IN_SETJMP_SETUP:
7282 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7283 and the receiver label. */
7284 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7286 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7287 VOIDmode, EXPAND_NORMAL);
7288 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7289 rtx_insn *label_r = label_rtx (label);
7291 /* This is copied from the handling of non-local gotos. */
7292 expand_builtin_setjmp_setup (buf_addr, label_r);
7293 nonlocal_goto_handler_labels
7294 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7295 nonlocal_goto_handler_labels);
7296 /* ??? Do not let expand_label treat us as such since we would
7297 not want to be both on the list of non-local labels and on
7298 the list of forced labels. */
7299 FORCED_LABEL (label) = 0;
7300 return const0_rtx;
7302 break;
7304 case BUILT_IN_SETJMP_RECEIVER:
7305 /* __builtin_setjmp_receiver is passed the receiver label. */
7306 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7308 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7309 rtx_insn *label_r = label_rtx (label);
7311 expand_builtin_setjmp_receiver (label_r);
7312 return const0_rtx;
7314 break;
7316 /* __builtin_longjmp is passed a pointer to an array of five words.
7317 It's similar to the C library longjmp function but works with
7318 __builtin_setjmp above. */
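/* Illustrative pairing (sketch; f is an assumed callee):

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       f ();    where f may later call __builtin_longjmp (buf, 1)

   The buffer is the five-word array mentioned above; the second argument
   to __builtin_longjmp must be 1, as enforced below.  */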
7319 case BUILT_IN_LONGJMP:
7320 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7322 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7323 VOIDmode, EXPAND_NORMAL);
7324 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7326 if (value != const1_rtx)
7328 error ("%<__builtin_longjmp%> second argument must be 1");
7329 return const0_rtx;
7332 expand_builtin_longjmp (buf_addr, value);
7333 return const0_rtx;
7335 break;
7337 case BUILT_IN_NONLOCAL_GOTO:
7338 target = expand_builtin_nonlocal_goto (exp);
7339 if (target)
7340 return target;
7341 break;
7343 /* This updates the setjmp buffer that is its argument with the value
7344 of the current stack pointer. */
7345 case BUILT_IN_UPDATE_SETJMP_BUF:
7346 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7348 rtx buf_addr
7349 = expand_normal (CALL_EXPR_ARG (exp, 0));
7351 expand_builtin_update_setjmp_buf (buf_addr);
7352 return const0_rtx;
7354 break;
7356 case BUILT_IN_TRAP:
7357 expand_builtin_trap ();
7358 return const0_rtx;
7360 case BUILT_IN_UNREACHABLE:
7361 expand_builtin_unreachable ();
7362 return const0_rtx;
7364 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7365 case BUILT_IN_SIGNBITD32:
7366 case BUILT_IN_SIGNBITD64:
7367 case BUILT_IN_SIGNBITD128:
7368 target = expand_builtin_signbit (exp, target);
7369 if (target)
7370 return target;
7371 break;
7373 /* Various hooks for the DWARF 2 __throw routine. */
7374 case BUILT_IN_UNWIND_INIT:
7375 expand_builtin_unwind_init ();
7376 return const0_rtx;
7377 case BUILT_IN_DWARF_CFA:
7378 return virtual_cfa_rtx;
7379 #ifdef DWARF2_UNWIND_INFO
7380 case BUILT_IN_DWARF_SP_COLUMN:
7381 return expand_builtin_dwarf_sp_column ();
7382 case BUILT_IN_INIT_DWARF_REG_SIZES:
7383 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7384 return const0_rtx;
7385 #endif
7386 case BUILT_IN_FROB_RETURN_ADDR:
7387 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7388 case BUILT_IN_EXTRACT_RETURN_ADDR:
7389 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7390 case BUILT_IN_EH_RETURN:
7391 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7392 CALL_EXPR_ARG (exp, 1));
7393 return const0_rtx;
7394 case BUILT_IN_EH_RETURN_DATA_REGNO:
7395 return expand_builtin_eh_return_data_regno (exp);
7396 case BUILT_IN_EXTEND_POINTER:
7397 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7398 case BUILT_IN_EH_POINTER:
7399 return expand_builtin_eh_pointer (exp);
7400 case BUILT_IN_EH_FILTER:
7401 return expand_builtin_eh_filter (exp);
7402 case BUILT_IN_EH_COPY_VALUES:
7403 return expand_builtin_eh_copy_values (exp);
7405 case BUILT_IN_VA_START:
7406 return expand_builtin_va_start (exp);
7407 case BUILT_IN_VA_END:
7408 return expand_builtin_va_end (exp);
7409 case BUILT_IN_VA_COPY:
7410 return expand_builtin_va_copy (exp);
7411 case BUILT_IN_EXPECT:
7412 return expand_builtin_expect (exp, target);
7413 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7414 return expand_builtin_expect_with_probability (exp, target);
7415 case BUILT_IN_ASSUME_ALIGNED:
7416 return expand_builtin_assume_aligned (exp, target);
7417 case BUILT_IN_PREFETCH:
7418 expand_builtin_prefetch (exp);
7419 return const0_rtx;
7421 case BUILT_IN_INIT_TRAMPOLINE:
7422 return expand_builtin_init_trampoline (exp, true);
7423 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7424 return expand_builtin_init_trampoline (exp, false);
7425 case BUILT_IN_ADJUST_TRAMPOLINE:
7426 return expand_builtin_adjust_trampoline (exp);
7428 case BUILT_IN_INIT_DESCRIPTOR:
7429 return expand_builtin_init_descriptor (exp);
7430 case BUILT_IN_ADJUST_DESCRIPTOR:
7431 return expand_builtin_adjust_descriptor (exp);
7433 case BUILT_IN_FORK:
7434 case BUILT_IN_EXECL:
7435 case BUILT_IN_EXECV:
7436 case BUILT_IN_EXECLP:
7437 case BUILT_IN_EXECLE:
7438 case BUILT_IN_EXECVP:
7439 case BUILT_IN_EXECVE:
7440 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7441 if (target)
7442 return target;
7443 break;
7445 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7446 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7447 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7448 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7449 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
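/* The _1.._16 built-in codes are consecutive, so the offset from the _1
   variant selects the access size; get_builtin_sync_mode maps offsets
   0..4 to the 1-, 2-, 4-, 8- and 16-byte integer modes.  */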
7450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7451 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7452 if (target)
7453 return target;
7454 break;
7456 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7457 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7458 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7459 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7460 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7462 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7463 if (target)
7464 return target;
7465 break;
7467 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7468 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7469 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7470 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7471 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7473 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7474 if (target)
7475 return target;
7476 break;
7478 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7479 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7480 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7481 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7482 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7483 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7484 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7485 if (target)
7486 return target;
7487 break;
7489 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7490 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7491 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7492 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7493 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7494 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7495 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7496 if (target)
7497 return target;
7498 break;
7500 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7501 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7502 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7503 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7504 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7505 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7506 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7507 if (target)
7508 return target;
7509 break;
7511 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7512 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7513 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7514 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7515 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7517 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7518 if (target)
7519 return target;
7520 break;
7522 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7523 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7524 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7525 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7526 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7528 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7529 if (target)
7530 return target;
7531 break;
7533 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7534 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7535 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7536 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7537 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7538 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7539 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7540 if (target)
7541 return target;
7542 break;
7544 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7545 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7546 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7547 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7548 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7549 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7550 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7551 if (target)
7552 return target;
7553 break;
7555 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7556 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7557 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7558 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7559 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7560 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7561 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7562 if (target)
7563 return target;
7564 break;
7566 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7567 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7568 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7569 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7570 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7571 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7572 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7573 if (target)
7574 return target;
7575 break;
7577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7578 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7579 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7580 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7581 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
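/* Force the boolean result into a register of the right mode before MODE
   is reassigned below to the mode of the memory operand.  */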
7582 if (mode == VOIDmode)
7583 mode = TYPE_MODE (boolean_type_node);
7584 if (!target || !register_operand (target, mode))
7585 target = gen_reg_rtx (mode);
7587 mode = get_builtin_sync_mode
7588 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7589 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7590 if (target)
7591 return target;
7592 break;
7594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7596 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7597 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7598 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7599 mode = get_builtin_sync_mode
7600 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7601 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7602 if (target)
7603 return target;
7604 break;
7606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7607 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7608 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7609 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7610 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7611 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7612 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7613 if (target)
7614 return target;
7615 break;
7617 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7618 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7619 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7620 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7621 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7622 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7623 expand_builtin_sync_lock_release (mode, exp);
7624 return const0_rtx;
7626 case BUILT_IN_SYNC_SYNCHRONIZE:
7627 expand_builtin_sync_synchronize ();
7628 return const0_rtx;
7630 case BUILT_IN_ATOMIC_EXCHANGE_1:
7631 case BUILT_IN_ATOMIC_EXCHANGE_2:
7632 case BUILT_IN_ATOMIC_EXCHANGE_4:
7633 case BUILT_IN_ATOMIC_EXCHANGE_8:
7634 case BUILT_IN_ATOMIC_EXCHANGE_16:
7635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7636 target = expand_builtin_atomic_exchange (mode, exp, target);
7637 if (target)
7638 return target;
7639 break;
7641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7642 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7643 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7644 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7645 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7647 unsigned int nargs, z;
7648 vec<tree, va_gc> *vec;
7650 mode =
7651 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7652 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7653 if (target)
7654 return target;
7656 /* If this is turned into an external library call, the weak parameter
7657 must be dropped to match the expected parameter list. */
7658 nargs = call_expr_nargs (exp);
7659 vec_alloc (vec, nargs - 1);
7660 for (z = 0; z < 3; z++)
7661 vec->quick_push (CALL_EXPR_ARG (exp, z));
7662 /* Skip the boolean weak parameter. */
7663 for (z = 4; z < 6; z++)
7664 vec->quick_push (CALL_EXPR_ARG (exp, z));
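/* The rebuilt call below therefore has the five-argument shape
   (ptr, expected, desired, success_order, failure_order), i.e. the
   original six-argument builtin minus the boolean WEAK argument
   (a sketch of the assumed libatomic prototype).  */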
7665 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7666 break;
7669 case BUILT_IN_ATOMIC_LOAD_1:
7670 case BUILT_IN_ATOMIC_LOAD_2:
7671 case BUILT_IN_ATOMIC_LOAD_4:
7672 case BUILT_IN_ATOMIC_LOAD_8:
7673 case BUILT_IN_ATOMIC_LOAD_16:
7674 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7675 target = expand_builtin_atomic_load (mode, exp, target);
7676 if (target)
7677 return target;
7678 break;
7680 case BUILT_IN_ATOMIC_STORE_1:
7681 case BUILT_IN_ATOMIC_STORE_2:
7682 case BUILT_IN_ATOMIC_STORE_4:
7683 case BUILT_IN_ATOMIC_STORE_8:
7684 case BUILT_IN_ATOMIC_STORE_16:
7685 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7686 target = expand_builtin_atomic_store (mode, exp);
7687 if (target)
7688 return const0_rtx;
7689 break;
7691 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7692 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7693 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7694 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7695 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7697 enum built_in_function lib;
7698 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7699 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7700 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
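/* ADD_FETCH has no library entry point of its own, so the same-size
   FETCH_ADD call is named as the fallback; the expander is then expected
   to re-apply the addition to the library result when the updated value
   is needed (assumption about the fallback path).  */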
7701 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7702 ignore, lib);
7703 if (target)
7704 return target;
7705 break;
7707 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7708 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7709 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7710 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7711 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7713 enum built_in_function lib;
7714 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7715 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7716 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7717 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7718 ignore, lib);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_ATOMIC_AND_FETCH_1:
7724 case BUILT_IN_ATOMIC_AND_FETCH_2:
7725 case BUILT_IN_ATOMIC_AND_FETCH_4:
7726 case BUILT_IN_ATOMIC_AND_FETCH_8:
7727 case BUILT_IN_ATOMIC_AND_FETCH_16:
7729 enum built_in_function lib;
7730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7731 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7732 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7733 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7734 ignore, lib);
7735 if (target)
7736 return target;
7737 break;
7739 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7740 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7741 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7742 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7743 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7745 enum built_in_function lib;
7746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7747 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7748 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7749 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7750 ignore, lib);
7751 if (target)
7752 return target;
7753 break;
7755 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7756 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7757 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7758 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7759 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7761 enum built_in_function lib;
7762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7763 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7764 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7765 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7766 ignore, lib);
7767 if (target)
7768 return target;
7769 break;
7771 case BUILT_IN_ATOMIC_OR_FETCH_1:
7772 case BUILT_IN_ATOMIC_OR_FETCH_2:
7773 case BUILT_IN_ATOMIC_OR_FETCH_4:
7774 case BUILT_IN_ATOMIC_OR_FETCH_8:
7775 case BUILT_IN_ATOMIC_OR_FETCH_16:
7777 enum built_in_function lib;
7778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7779 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7780 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7781 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7782 ignore, lib);
7783 if (target)
7784 return target;
7785 break;
7787 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7788 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7789 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7790 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7791 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7792 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7793 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7794 ignore, BUILT_IN_NONE);
7795 if (target)
7796 return target;
7797 break;
7799 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7800 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7801 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7802 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7803 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7804 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7805 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7806 ignore, BUILT_IN_NONE);
7807 if (target)
7808 return target;
7809 break;
7811 case BUILT_IN_ATOMIC_FETCH_AND_1:
7812 case BUILT_IN_ATOMIC_FETCH_AND_2:
7813 case BUILT_IN_ATOMIC_FETCH_AND_4:
7814 case BUILT_IN_ATOMIC_FETCH_AND_8:
7815 case BUILT_IN_ATOMIC_FETCH_AND_16:
7816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7817 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7818 ignore, BUILT_IN_NONE);
7819 if (target)
7820 return target;
7821 break;
7823 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7824 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7825 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7826 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7827 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7828 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7829 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7830 ignore, BUILT_IN_NONE);
7831 if (target)
7832 return target;
7833 break;
7835 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7836 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7837 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7838 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7839 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7840 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7841 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7842 ignore, BUILT_IN_NONE);
7843 if (target)
7844 return target;
7845 break;
7847 case BUILT_IN_ATOMIC_FETCH_OR_1:
7848 case BUILT_IN_ATOMIC_FETCH_OR_2:
7849 case BUILT_IN_ATOMIC_FETCH_OR_4:
7850 case BUILT_IN_ATOMIC_FETCH_OR_8:
7851 case BUILT_IN_ATOMIC_FETCH_OR_16:
7852 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7853 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7854 ignore, BUILT_IN_NONE);
7855 if (target)
7856 return target;
7857 break;
7859 case BUILT_IN_ATOMIC_TEST_AND_SET:
7860 return expand_builtin_atomic_test_and_set (exp, target);
7862 case BUILT_IN_ATOMIC_CLEAR:
7863 return expand_builtin_atomic_clear (exp);
7865 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7866 return expand_builtin_atomic_always_lock_free (exp);
7868 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7869 target = expand_builtin_atomic_is_lock_free (exp);
7870 if (target)
7871 return target;
7872 break;
7874 case BUILT_IN_ATOMIC_THREAD_FENCE:
7875 expand_builtin_atomic_thread_fence (exp);
7876 return const0_rtx;
7878 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7879 expand_builtin_atomic_signal_fence (exp);
7880 return const0_rtx;
7882 case BUILT_IN_OBJECT_SIZE:
7883 return expand_builtin_object_size (exp);
7885 case BUILT_IN_MEMCPY_CHK:
7886 case BUILT_IN_MEMPCPY_CHK:
7887 case BUILT_IN_MEMMOVE_CHK:
7888 case BUILT_IN_MEMSET_CHK:
7889 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7890 if (target)
7891 return target;
7892 break;
7894 case BUILT_IN_STRCPY_CHK:
7895 case BUILT_IN_STPCPY_CHK:
7896 case BUILT_IN_STRNCPY_CHK:
7897 case BUILT_IN_STPNCPY_CHK:
7898 case BUILT_IN_STRCAT_CHK:
7899 case BUILT_IN_STRNCAT_CHK:
7900 case BUILT_IN_SNPRINTF_CHK:
7901 case BUILT_IN_VSNPRINTF_CHK:
7902 maybe_emit_chk_warning (exp, fcode);
7903 break;
7905 case BUILT_IN_SPRINTF_CHK:
7906 case BUILT_IN_VSPRINTF_CHK:
7907 maybe_emit_sprintf_chk_warning (exp, fcode);
7908 break;
7910 case BUILT_IN_THREAD_POINTER:
7911 return expand_builtin_thread_pointer (exp, target);
7913 case BUILT_IN_SET_THREAD_POINTER:
7914 expand_builtin_set_thread_pointer (exp);
7915 return const0_rtx;
7917 case BUILT_IN_ACC_ON_DEVICE:
7918 /* Do library call, if we failed to expand the builtin when
7919 folding. */
7920 break;
7922 case BUILT_IN_GOACC_PARLEVEL_ID:
7923 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7924 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7926 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
7927 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
7929 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
7930 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
7931 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
7932 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
7933 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
7934 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
7935 return expand_speculation_safe_value (mode, exp, target, ignore);
7937 default: /* just do library call, if unknown builtin */
7938 break;
7941 /* The switch statement above can drop through to cause the function
7942 to be called normally. */
7943 return expand_call (exp, target, ignore);
7946 /* Determine whether a tree node represents a call to a built-in
7947 function. If the tree T is a call to a built-in function with
7948 the right number of arguments of the appropriate types, return
7949 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7950 Otherwise the return value is END_BUILTINS. */
7952 enum built_in_function
7953 builtin_mathfn_code (const_tree t)
7955 const_tree fndecl, arg, parmlist;
7956 const_tree argtype, parmtype;
7957 const_call_expr_arg_iterator iter;
7959 if (TREE_CODE (t) != CALL_EXPR)
7960 return END_BUILTINS;
7962 fndecl = get_callee_fndecl (t);
7963 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7964 return END_BUILTINS;
7966 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7967 init_const_call_expr_arg_iterator (t, &iter);
7968 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7970 /* If a function doesn't take a variable number of arguments,
7971 the last element in the list will have type `void'. */
7972 parmtype = TREE_VALUE (parmlist);
7973 if (VOID_TYPE_P (parmtype))
7975 if (more_const_call_expr_args_p (&iter))
7976 return END_BUILTINS;
7977 return DECL_FUNCTION_CODE (fndecl);
7980 if (! more_const_call_expr_args_p (&iter))
7981 return END_BUILTINS;
7983 arg = next_const_call_expr_arg (&iter);
7984 argtype = TREE_TYPE (arg);
7986 if (SCALAR_FLOAT_TYPE_P (parmtype))
7988 if (! SCALAR_FLOAT_TYPE_P (argtype))
7989 return END_BUILTINS;
7991 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7993 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7994 return END_BUILTINS;
7996 else if (POINTER_TYPE_P (parmtype))
7998 if (! POINTER_TYPE_P (argtype))
7999 return END_BUILTINS;
8001 else if (INTEGRAL_TYPE_P (parmtype))
8003 if (! INTEGRAL_TYPE_P (argtype))
8004 return END_BUILTINS;
8006 else
8007 return END_BUILTINS;
8010 /* Variable-length argument list. */
8011 return DECL_FUNCTION_CODE (fndecl);
8014 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8015 evaluate to a constant. */
8017 static tree
8018 fold_builtin_constant_p (tree arg)
8020 /* We return 1 for a numeric type that's known to be a constant
8021 value at compile-time or for an aggregate type that's a
8022 literal constant. */
8023 STRIP_NOPS (arg);
8025 /* If we know this is a constant, emit the constant of one. */
8026 if (CONSTANT_CLASS_P (arg)
8027 || (TREE_CODE (arg) == CONSTRUCTOR
8028 && TREE_CONSTANT (arg)))
8029 return integer_one_node;
8030 if (TREE_CODE (arg) == ADDR_EXPR)
8032 tree op = TREE_OPERAND (arg, 0);
8033 if (TREE_CODE (op) == STRING_CST
8034 || (TREE_CODE (op) == ARRAY_REF
8035 && integer_zerop (TREE_OPERAND (op, 1))
8036 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8037 return integer_one_node;
8040 /* If this expression has side effects, show we don't know it to be a
8041 constant. Likewise if it's a pointer or aggregate type since in
8042 those cases we only want literals, since those are only optimized
8043 when generating RTL, not later.
8044 And finally, if we are compiling an initializer, not code, we
8045 need to return a definite result now; there's not going to be any
8046 more optimization done. */
8047 if (TREE_SIDE_EFFECTS (arg)
8048 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8049 || POINTER_TYPE_P (TREE_TYPE (arg))
8050 || cfun == 0
8051 || folding_initializer
8052 || force_folding_builtin_constant_p)
8053 return integer_zero_node;
8055 return NULL_TREE;
8058 /* Create builtin_expect or builtin_expect_with_probability
8059 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8060 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8061 builtin_expect_with_probability instead uses third argument as PROBABILITY
8062 value. */
8064 static tree
8065 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8066 tree predictor, tree probability)
8068 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8070 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8071 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8072 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8073 ret_type = TREE_TYPE (TREE_TYPE (fn));
8074 pred_type = TREE_VALUE (arg_types);
8075 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8077 pred = fold_convert_loc (loc, pred_type, pred);
8078 expected = fold_convert_loc (loc, expected_type, expected);
8080 if (probability)
8081 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8082 else
8083 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8084 predictor);
8086 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8087 build_int_cst (ret_type, 0));
8090 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8091 NULL_TREE if no simplification is possible. */
8093 tree
8094 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8095 tree arg3)
8097 tree inner, fndecl, inner_arg0;
8098 enum tree_code code;
8100 /* Distribute the expected value over short-circuiting operators.
8101 See through the cast from truthvalue_type_node to long. */
8102 inner_arg0 = arg0;
8103 while (CONVERT_EXPR_P (inner_arg0)
8104 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8105 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8106 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8108 /* If this is a builtin_expect within a builtin_expect keep the
8109 inner one. See through a comparison against a constant. It
8110 might have been added to create a truthvalue. */
8111 inner = inner_arg0;
8113 if (COMPARISON_CLASS_P (inner)
8114 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8115 inner = TREE_OPERAND (inner, 0);
8117 if (TREE_CODE (inner) == CALL_EXPR
8118 && (fndecl = get_callee_fndecl (inner))
8119 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8120 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8121 return arg0;
8123 inner = inner_arg0;
8124 code = TREE_CODE (inner);
8125 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8127 tree op0 = TREE_OPERAND (inner, 0);
8128 tree op1 = TREE_OPERAND (inner, 1);
8129 arg1 = save_expr (arg1);
8131 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8132 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8133 inner = build2 (code, TREE_TYPE (inner), op0, op1);
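/* E.g. __builtin_expect (a && b, 1) is rewritten here as
   (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
   so each operand carries the hint (illustrative sketch).  */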
8135 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8138 /* If the argument isn't invariant then there's nothing else we can do. */
8139 if (!TREE_CONSTANT (inner_arg0))
8140 return NULL_TREE;
8142 /* If we expect that a comparison against the argument will fold to
8143 a constant return the constant. In practice, this means a true
8144 constant or the address of a non-weak symbol. */
8145 inner = inner_arg0;
8146 STRIP_NOPS (inner);
8147 if (TREE_CODE (inner) == ADDR_EXPR)
8151 inner = TREE_OPERAND (inner, 0);
8153 while (TREE_CODE (inner) == COMPONENT_REF
8154 || TREE_CODE (inner) == ARRAY_REF);
8155 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8156 return NULL_TREE;
8159 /* Otherwise, ARG0 already has the proper type for the return value. */
8160 return arg0;
8163 /* Fold a call to __builtin_classify_type with argument ARG. */
8165 static tree
8166 fold_builtin_classify_type (tree arg)
8168 if (arg == 0)
8169 return build_int_cst (integer_type_node, no_type_class);
8171 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8174 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8175 ARG. */
8177 static tree
8178 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8180 if (!validate_arg (arg, POINTER_TYPE))
8181 return NULL_TREE;
8182 else
8184 c_strlen_data lendata = { };
8185 tree len = c_strlen (arg, 0, &lendata);
8187 if (len)
8188 return fold_convert_loc (loc, type, len);
8190 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8191 also early enough to detect invalid reads in multidimensional
8192 arrays and struct members. */
8193 if (!lendata.decl)
8194 c_strlen (arg, 1, &lendata);
8196 if (lendata.decl)
8198 if (EXPR_HAS_LOCATION (arg))
8199 loc = EXPR_LOCATION (arg);
8200 else if (loc == UNKNOWN_LOCATION)
8201 loc = input_location;
8202 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8205 return NULL_TREE;
8209 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8211 static tree
8212 fold_builtin_inf (location_t loc, tree type, int warn)
8214 REAL_VALUE_TYPE real;
8216 /* __builtin_inff is intended to be usable to define INFINITY on all
8217 targets. If an infinity is not available, INFINITY expands "to a
8218 positive constant of type float that overflows at translation
8219 time", footnote "In this case, using INFINITY will violate the
8220 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8221 Thus we pedwarn to ensure this constraint violation is
8222 diagnosed. */
8223 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8224 pedwarn (loc, 0, "target format does not support infinity");
8226 real_inf (&real);
8227 return build_real (type, real);
8230 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8231 NULL_TREE if no simplification can be made. */
8233 static tree
8234 fold_builtin_sincos (location_t loc,
8235 tree arg0, tree arg1, tree arg2)
8237 tree type;
8238 tree fndecl, call = NULL_TREE;
8240 if (!validate_arg (arg0, REAL_TYPE)
8241 || !validate_arg (arg1, POINTER_TYPE)
8242 || !validate_arg (arg2, POINTER_TYPE))
8243 return NULL_TREE;
8245 type = TREE_TYPE (arg0);
8247 /* Calculate the result when the argument is a constant. */
8248 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8249 if (fn == END_BUILTINS)
8250 return NULL_TREE;
8252 /* Canonicalize sincos to cexpi. */
8253 if (TREE_CODE (arg0) == REAL_CST)
8255 tree complex_type = build_complex_type (type);
8256 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8258 if (!call)
8260 if (!targetm.libc_has_function (function_c99_math_complex, type)
8261 || !builtin_decl_implicit_p (fn))
8262 return NULL_TREE;
8263 fndecl = builtin_decl_explicit (fn);
8264 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8265 call = builtin_save_expr (call);
8268 tree ptype = build_pointer_type (type);
8269 arg1 = fold_convert (ptype, arg1);
8270 arg2 = fold_convert (ptype, arg2);
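/* Sketch of the result built below: sincos (x, sinp, cosp) becomes
     t = cexpi (x); *sinp = __imag__ t; *cosp = __real__ t;
   evaluated as a single COMPOUND_EXPR.  */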
8271 return build2 (COMPOUND_EXPR, void_type_node,
8272 build2 (MODIFY_EXPR, void_type_node,
8273 build_fold_indirect_ref_loc (loc, arg1),
8274 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8275 build2 (MODIFY_EXPR, void_type_node,
8276 build_fold_indirect_ref_loc (loc, arg2),
8277 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8280 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8281 Return NULL_TREE if no simplification can be made. */
8283 static tree
8284 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8286 if (!validate_arg (arg1, POINTER_TYPE)
8287 || !validate_arg (arg2, POINTER_TYPE)
8288 || !validate_arg (len, INTEGER_TYPE))
8289 return NULL_TREE;
8291 /* If the LEN parameter is zero, return zero. */
8292 if (integer_zerop (len))
8293 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8294 arg1, arg2);
8296 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8297 if (operand_equal_p (arg1, arg2, 0))
8298 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8300 /* If the LEN parameter is one, return an expression corresponding to
8301 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8302 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8304 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8305 tree cst_uchar_ptr_node
8306 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8308 tree ind1
8309 = fold_convert_loc (loc, integer_type_node,
8310 build1 (INDIRECT_REF, cst_uchar_node,
8311 fold_convert_loc (loc,
8312 cst_uchar_ptr_node,
8313 arg1)));
8314 tree ind2
8315 = fold_convert_loc (loc, integer_type_node,
8316 build1 (INDIRECT_REF, cst_uchar_node,
8317 fold_convert_loc (loc,
8318 cst_uchar_ptr_node,
8319 arg2)));
8320 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8323 return NULL_TREE;
8326 /* Fold a call to builtin isascii with argument ARG. */
8328 static tree
8329 fold_builtin_isascii (location_t loc, tree arg)
8331 if (!validate_arg (arg, INTEGER_TYPE))
8332 return NULL_TREE;
8333 else
8335 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8336 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8337 build_int_cst (integer_type_node,
8338 ~ (unsigned HOST_WIDE_INT) 0x7f));
8339 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8340 arg, integer_zero_node);
8344 /* Fold a call to builtin toascii with argument ARG. */
8346 static tree
8347 fold_builtin_toascii (location_t loc, tree arg)
8349 if (!validate_arg (arg, INTEGER_TYPE))
8350 return NULL_TREE;
8352 /* Transform toascii(c) -> (c & 0x7f). */
8353 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8354 build_int_cst (integer_type_node, 0x7f));
8357 /* Fold a call to builtin isdigit with argument ARG. */
8359 static tree
8360 fold_builtin_isdigit (location_t loc, tree arg)
8362 if (!validate_arg (arg, INTEGER_TYPE))
8363 return NULL_TREE;
8364 else
8366 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8367 /* According to the C standard, isdigit is unaffected by locale.
8368 However, it definitely is affected by the target character set. */
8369 unsigned HOST_WIDE_INT target_digit0
8370 = lang_hooks.to_target_charset ('0');
8372 if (target_digit0 == 0)
8373 return NULL_TREE;
8375 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8376 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8377 build_int_cst (unsigned_type_node, target_digit0));
8378 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8379 build_int_cst (unsigned_type_node, 9));
8383 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8385 static tree
8386 fold_builtin_fabs (location_t loc, tree arg, tree type)
8388 if (!validate_arg (arg, REAL_TYPE))
8389 return NULL_TREE;
8391 arg = fold_convert_loc (loc, type, arg);
8392 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8395 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8397 static tree
8398 fold_builtin_abs (location_t loc, tree arg, tree type)
8400 if (!validate_arg (arg, INTEGER_TYPE))
8401 return NULL_TREE;
8403 arg = fold_convert_loc (loc, type, arg);
8404 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8407 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8409 static tree
8410 fold_builtin_carg (location_t loc, tree arg, tree type)
8412 if (validate_arg (arg, COMPLEX_TYPE)
8413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8415 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8417 if (atan2_fn)
8419 tree new_arg = builtin_save_expr (arg);
8420 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8421 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8422 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8426 return NULL_TREE;
8429 /* Fold a call to builtin frexp, we can assume the base is 2. */
8431 static tree
8432 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8434 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8435 return NULL_TREE;
8437 STRIP_NOPS (arg0);
8439 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8440 return NULL_TREE;
8442 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8444 /* Proceed if a valid pointer type was passed in. */
8445 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8447 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8448 tree frac, exp;
8450 switch (value->cl)
8452 case rvc_zero:
8453 /* For +-0, return (*exp = 0, +-0). */
8454 exp = integer_zero_node;
8455 frac = arg0;
8456 break;
8457 case rvc_nan:
8458 case rvc_inf:
8459 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8460 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8461 case rvc_normal:
8463 /* Since the frexp function always expects base 2, and in
8464 GCC normalized significands are already in the range
8465 [0.5, 1.0), we have exactly what frexp wants. */
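/* Worked example (sketch): for arg0 == 8.0 the stored significand is
   0.5 with REAL_EXP == 4, so the fold below yields (*arg1 = 4, 0.5),
   matching frexp (8.0, &e).  */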
8466 REAL_VALUE_TYPE frac_rvt = *value;
8467 SET_REAL_EXP (&frac_rvt, 0);
8468 frac = build_real (rettype, frac_rvt);
8469 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8471 break;
8472 default:
8473 gcc_unreachable ();
8476 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8477 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8478 TREE_SIDE_EFFECTS (arg1) = 1;
8479 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8482 return NULL_TREE;
8485 /* Fold a call to builtin modf. */
8487 static tree
8488 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8490 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8491 return NULL_TREE;
8493 STRIP_NOPS (arg0);
8495 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8496 return NULL_TREE;
8498 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8500 /* Proceed if a valid pointer type was passed in. */
8501 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8503 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8504 REAL_VALUE_TYPE trunc, frac;
8506 switch (value->cl)
8508 case rvc_nan:
8509 case rvc_zero:
8510 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8511 trunc = frac = *value;
8512 break;
8513 case rvc_inf:
8514 /* For +-Inf, return (*arg1 = arg0, +-0). */
8515 frac = dconst0;
8516 frac.sign = value->sign;
8517 trunc = *value;
8518 break;
8519 case rvc_normal:
8520 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8521 real_trunc (&trunc, VOIDmode, value);
8522 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8523 /* If the original number was negative and already
8524 integral, then the fractional part is -0.0. */
8525 if (value->sign && frac.cl == rvc_zero)
8526 frac.sign = value->sign;
8527 break;
8530 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8531 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8532 build_real (rettype, trunc));
8533 TREE_SIDE_EFFECTS (arg1) = 1;
8534 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8535 build_real (rettype, frac));
8538 return NULL_TREE;
8541 /* Given a location LOC, an interclass builtin function decl FNDECL
8542 and its single argument ARG, return a folded expression computing
8543 the same, or NULL_TREE if we either couldn't or didn't want to fold
8544 (the latter happens if there's an RTL instruction available). */
8546 static tree
8547 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8549 machine_mode mode;
8551 if (!validate_arg (arg, REAL_TYPE))
8552 return NULL_TREE;
8554 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8555 return NULL_TREE;
8557 mode = TYPE_MODE (TREE_TYPE (arg));
8559 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8561 /* If there is no optab, try generic code. */
8562 switch (DECL_FUNCTION_CODE (fndecl))
8564 tree result;
8566 CASE_FLT_FN (BUILT_IN_ISINF):
8568 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8569 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8570 tree type = TREE_TYPE (arg);
8571 REAL_VALUE_TYPE r;
8572 char buf[128];
8574 if (is_ibm_extended)
8576 /* NaN and Inf are encoded in the high-order double value
8577 only. The low-order value is not significant. */
8578 type = double_type_node;
8579 mode = DFmode;
8580 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8582 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8583 real_from_string (&r, buf);
8584 result = build_call_expr (isgr_fn, 2,
8585 fold_build1_loc (loc, ABS_EXPR, type, arg),
8586 build_real (type, r));
8587 return result;
8589 CASE_FLT_FN (BUILT_IN_FINITE):
8590 case BUILT_IN_ISFINITE:
8592 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8593 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8594 tree type = TREE_TYPE (arg);
8595 REAL_VALUE_TYPE r;
8596 char buf[128];
8598 if (is_ibm_extended)
8600 /* NaN and Inf are encoded in the high-order double value
8601 only. The low-order value is not significant. */
8602 type = double_type_node;
8603 mode = DFmode;
8604 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8606 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8607 real_from_string (&r, buf);
8608 result = build_call_expr (isle_fn, 2,
8609 fold_build1_loc (loc, ABS_EXPR, type, arg),
8610 build_real (type, r));
8611 /*result = fold_build2_loc (loc, UNGT_EXPR,
8612 TREE_TYPE (TREE_TYPE (fndecl)),
8613 fold_build1_loc (loc, ABS_EXPR, type, arg),
8614 build_real (type, r));
8615 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8616 TREE_TYPE (TREE_TYPE (fndecl)),
8617 result);*/
8618 return result;
8620 case BUILT_IN_ISNORMAL:
8622 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8623 islessequal(fabs(x),DBL_MAX). */
8624 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8625 tree type = TREE_TYPE (arg);
8626 tree orig_arg, max_exp, min_exp;
8627 machine_mode orig_mode = mode;
8628 REAL_VALUE_TYPE rmax, rmin;
8629 char buf[128];
8631 orig_arg = arg = builtin_save_expr (arg);
8632 if (is_ibm_extended)
8634 /* Use double to test the normal range of IBM extended
8635 precision. Emin for IBM extended precision is
8636 different to emin for IEEE double, being 53 higher
8637 since the low double exponent is at least 53 lower
8638 than the high double exponent. */
8639 type = double_type_node;
8640 mode = DFmode;
8641 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8643 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8645 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8646 real_from_string (&rmax, buf);
8647 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8648 real_from_string (&rmin, buf);
8649 max_exp = build_real (type, rmax);
8650 min_exp = build_real (type, rmin);
8652 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8653 if (is_ibm_extended)
8655 /* Testing the high end of the range is done just using
8656 the high double, using the same test as isfinite().
8657 For the subnormal end of the range we first test the
8658 high double, then if its magnitude is equal to the
8659 limit of 0x1p-969, we test whether the low double is
8660 non-zero and opposite sign to the high double. */
8661 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8662 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8663 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8664 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8665 arg, min_exp);
8666 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8667 complex_double_type_node, orig_arg);
8668 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8669 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8670 tree zero = build_real (type, dconst0);
8671 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8672 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8673 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8674 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8675 fold_build3 (COND_EXPR,
8676 integer_type_node,
8677 hilt, logt, lolt));
8678 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8679 eq_min, ok_lo);
8680 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8681 gt_min, eq_min);
8683 else
8685 tree const isge_fn
8686 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8687 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8689 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8690 max_exp, min_exp);
8691 return result;
8693 default:
8694 break;
8697 return NULL_TREE;
8700 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8701 ARG is the argument for the call. */
8703 static tree
8704 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8706 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8708 if (!validate_arg (arg, REAL_TYPE))
8709 return NULL_TREE;
8711 switch (builtin_index)
8713 case BUILT_IN_ISINF:
8714 if (tree_expr_infinite_p (arg))
8715 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8716 if (!tree_expr_maybe_infinite_p (arg))
8717 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8718 return NULL_TREE;
8720 case BUILT_IN_ISINF_SIGN:
8722 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8723 /* In a boolean context, GCC will fold the inner COND_EXPR to
8724 1. So e.g. "if (isinf_sign(x))" would be folded to just
8725 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8726 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8727 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8728 tree tmp = NULL_TREE;
8730 arg = builtin_save_expr (arg);
8732 if (signbit_fn && isinf_fn)
8734 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8735 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8737 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8738 signbit_call, integer_zero_node);
8739 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8740 isinf_call, integer_zero_node);
8742 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8743 integer_minus_one_node, integer_one_node);
8744 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8745 isinf_call, tmp,
8746 integer_zero_node);
8749 return tmp;
8752 case BUILT_IN_ISFINITE:
8753 if (tree_expr_finite_p (arg))
8754 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8755 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8756 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8757 return NULL_TREE;
8759 case BUILT_IN_ISNAN:
8760 if (tree_expr_nan_p (arg))
8761 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8762 if (!tree_expr_maybe_nan_p (arg))
8763 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8766 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8767 if (is_ibm_extended)
8769 /* NaN and Inf are encoded in the high-order double value
8770 only. The low-order value is not significant. */
8771 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8774 arg = builtin_save_expr (arg);
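/* isnan (x) is folded to the self-comparison x unord x, which is true
   exactly when x is a NaN.  */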
8775 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8777 default:
8778 gcc_unreachable ();
8782 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8783 This builtin will generate code to return the appropriate floating
8784 point classification depending on the value of the floating point
8785 number passed in. The possible return values must be supplied as
8786 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8787 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8788 one floating point argument which is "type generic". */
8790 static tree
8791 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8793 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8794 arg, type, res, tmp;
8795 machine_mode mode;
8796 REAL_VALUE_TYPE r;
8797 char buf[128];
8799 /* Verify the required arguments in the original call. */
8800 if (nargs != 6
8801 || !validate_arg (args[0], INTEGER_TYPE)
8802 || !validate_arg (args[1], INTEGER_TYPE)
8803 || !validate_arg (args[2], INTEGER_TYPE)
8804 || !validate_arg (args[3], INTEGER_TYPE)
8805 || !validate_arg (args[4], INTEGER_TYPE)
8806 || !validate_arg (args[5], REAL_TYPE))
8807 return NULL_TREE;
8809 fp_nan = args[0];
8810 fp_infinite = args[1];
8811 fp_normal = args[2];
8812 fp_subnormal = args[3];
8813 fp_zero = args[4];
8814 arg = args[5];
8815 type = TREE_TYPE (arg);
8816 mode = TYPE_MODE (type);
8817 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8819 /* fpclassify(x) ->
8820 isnan(x) ? FP_NAN :
8821 (fabs(x) == Inf ? FP_INFINITE :
8822 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8823 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8825 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8826 build_real (type, dconst0));
8827 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8828 tmp, fp_zero, fp_subnormal);
8830 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8831 real_from_string (&r, buf);
8832 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8833 arg, build_real (type, r));
8834 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8836 if (tree_expr_maybe_infinite_p (arg))
8838 real_inf (&r);
8839 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8840 build_real (type, r));
8841 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8842 fp_infinite, res);
8845 if (tree_expr_maybe_nan_p (arg))
8847 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8848 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8851 return res;
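/* An illustrative caller-side sketch (hypothetical, not part of this file):
   the folder above rewrites the type-generic call into the comparison chain
   shown in its comment.  For double, emin - 1 is -1022, so the "normal"
   test compares fabs (x) against 0x1p-1022 (DBL_MIN).  */

static int
example_fpclassify (double x)
{
  /* The class values 1..5 are arbitrary here; real callers pass FP_NAN,
     FP_INFINITE, FP_NORMAL, FP_SUBNORMAL and FP_ZERO from <math.h>.  */
  return __builtin_fpclassify (1, 2, 3, 4, 5, x);
}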
8854 /* Fold a call to an unordered comparison function such as
8855 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8856 being called and ARG0 and ARG1 are the arguments for the call.
8857 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8858 the opposite of the desired result. UNORDERED_CODE is used
8859 for modes that can hold NaNs and ORDERED_CODE is used for
8860 the rest. */
8862 static tree
8863 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8864 enum tree_code unordered_code,
8865 enum tree_code ordered_code)
8867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8868 enum tree_code code;
8869 tree type0, type1;
8870 enum tree_code code0, code1;
8871 tree cmp_type = NULL_TREE;
8873 type0 = TREE_TYPE (arg0);
8874 type1 = TREE_TYPE (arg1);
8876 code0 = TREE_CODE (type0);
8877 code1 = TREE_CODE (type1);
8879 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8880 /* Choose the wider of two real types. */
8881 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8882 ? type0 : type1;
8883 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8884 cmp_type = type0;
8885 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8886 cmp_type = type1;
8888 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8889 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8891 if (unordered_code == UNORDERED_EXPR)
8893 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
8894 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
8895 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
8896 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8897 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8900 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
8901 ? unordered_code : ordered_code;
8902 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8903 fold_build2_loc (loc, code, type, arg0, arg1));
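/* A hypothetical caller-side sketch (not part of this file): the unordered
   comparison builtins fold to the negation of the opposite comparison,
   using the UN* codes when an operand may be a NaN so that no "invalid"
   exception is raised.  */

static int
example_isgreater (double x, double y)
{
  return __builtin_isgreater (x, y);   /* folds to !(x UNLE y) */
}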
8906 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8907 arithmetic if it can never overflow, or into internal functions that
8908 return both the result of the arithmetic and an overflowed boolean flag in
8909 a complex integer result, or some other check for overflow.
8910 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8911 checking part of that. */
8913 static tree
8914 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8915 tree arg0, tree arg1, tree arg2)
8917 enum internal_fn ifn = IFN_LAST;
8918 /* The code of the expression corresponding to the built-in. */
8919 enum tree_code opcode = ERROR_MARK;
8920 bool ovf_only = false;
8922 switch (fcode)
8924 case BUILT_IN_ADD_OVERFLOW_P:
8925 ovf_only = true;
8926 /* FALLTHRU */
8927 case BUILT_IN_ADD_OVERFLOW:
8928 case BUILT_IN_SADD_OVERFLOW:
8929 case BUILT_IN_SADDL_OVERFLOW:
8930 case BUILT_IN_SADDLL_OVERFLOW:
8931 case BUILT_IN_UADD_OVERFLOW:
8932 case BUILT_IN_UADDL_OVERFLOW:
8933 case BUILT_IN_UADDLL_OVERFLOW:
8934 opcode = PLUS_EXPR;
8935 ifn = IFN_ADD_OVERFLOW;
8936 break;
8937 case BUILT_IN_SUB_OVERFLOW_P:
8938 ovf_only = true;
8939 /* FALLTHRU */
8940 case BUILT_IN_SUB_OVERFLOW:
8941 case BUILT_IN_SSUB_OVERFLOW:
8942 case BUILT_IN_SSUBL_OVERFLOW:
8943 case BUILT_IN_SSUBLL_OVERFLOW:
8944 case BUILT_IN_USUB_OVERFLOW:
8945 case BUILT_IN_USUBL_OVERFLOW:
8946 case BUILT_IN_USUBLL_OVERFLOW:
8947 opcode = MINUS_EXPR;
8948 ifn = IFN_SUB_OVERFLOW;
8949 break;
8950 case BUILT_IN_MUL_OVERFLOW_P:
8951 ovf_only = true;
8952 /* FALLTHRU */
8953 case BUILT_IN_MUL_OVERFLOW:
8954 case BUILT_IN_SMUL_OVERFLOW:
8955 case BUILT_IN_SMULL_OVERFLOW:
8956 case BUILT_IN_SMULLL_OVERFLOW:
8957 case BUILT_IN_UMUL_OVERFLOW:
8958 case BUILT_IN_UMULL_OVERFLOW:
8959 case BUILT_IN_UMULLL_OVERFLOW:
8960 opcode = MULT_EXPR;
8961 ifn = IFN_MUL_OVERFLOW;
8962 break;
8963 default:
8964 gcc_unreachable ();
8967 /* For the "generic" overloads, the first two arguments can have different
8968 types and the last argument determines the target type to use to check
8969 for overflow. The arguments of the other overloads all have the same
8970 type. */
8971 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8973 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8974 arguments are constant, attempt to fold the built-in call into a constant
8975 expression indicating whether or not it detected an overflow. */
8976 if (ovf_only
8977 && TREE_CODE (arg0) == INTEGER_CST
8978 && TREE_CODE (arg1) == INTEGER_CST)
8979 /* Perform the computation in the target type and check for overflow. */
8980 return omit_one_operand_loc (loc, boolean_type_node,
8981 arith_overflowed_p (opcode, type, arg0, arg1)
8982 ? boolean_true_node : boolean_false_node,
8983 arg2);
8985 tree intres, ovfres;
8986 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8988 intres = fold_binary_loc (loc, opcode, type,
8989 fold_convert_loc (loc, type, arg0),
8990 fold_convert_loc (loc, type, arg1));
8991 if (TREE_OVERFLOW (intres))
8992 intres = drop_tree_overflow (intres);
8993 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
8994 ? boolean_true_node : boolean_false_node);
8996 else
8998 tree ctype = build_complex_type (type);
8999 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9000 arg0, arg1);
9001 tree tgt = save_expr (call);
9002 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9003 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9004 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9007 if (ovf_only)
9008 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9010 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9011 tree store
9012 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9013 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
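/* A hypothetical caller-side sketch (not part of this file): a call such as
   the one below is folded to IFN_ADD_OVERFLOW returning a complex integer;
   the REALPART_EXPR is stored through RES and the IMAGPART_EXPR, converted
   to bool, becomes the return value.  With two constant operands the whole
   call folds to a constant flag instead.  */

static bool
example_add_overflow (int a, int b, int *res)
{
  return __builtin_add_overflow (a, b, res);
}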
9016 /* Fold a call to __builtin_FILE to a constant string. */
9018 static inline tree
9019 fold_builtin_FILE (location_t loc)
9021 if (const char *fname = LOCATION_FILE (loc))
9023 /* The documentation says this builtin is equivalent to the preprocessor
9024 __FILE__ macro so it appears appropriate to use the same file prefix
9025 mappings. */
9026 fname = remap_macro_filename (fname);
9027 return build_string_literal (strlen (fname) + 1, fname);
9030 return build_string_literal (1, "");
9033 /* Fold a call to __builtin_FUNCTION to a constant string. */
9035 static inline tree
9036 fold_builtin_FUNCTION ()
9038 const char *name = "";
9040 if (current_function_decl)
9041 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9043 return build_string_literal (strlen (name) + 1, name);
9046 /* Fold a call to __builtin_LINE to an integer constant. */
9048 static inline tree
9049 fold_builtin_LINE (location_t loc, tree type)
9051 return build_int_cst (type, LOCATION_LINE (loc));
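/* A hypothetical sketch (not part of this file): each of these builtins
   folds to a constant at its call site, e.g.  */

static int
example_line (void)
{
  return __builtin_LINE ();   /* folds to the integer line number here */
}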
9054 /* Fold a call to built-in function FNDECL with 0 arguments.
9055 This function returns NULL_TREE if no simplification was possible. */
9057 static tree
9058 fold_builtin_0 (location_t loc, tree fndecl)
9060 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9061 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9062 switch (fcode)
9064 case BUILT_IN_FILE:
9065 return fold_builtin_FILE (loc);
9067 case BUILT_IN_FUNCTION:
9068 return fold_builtin_FUNCTION ();
9070 case BUILT_IN_LINE:
9071 return fold_builtin_LINE (loc, type);
9073 CASE_FLT_FN (BUILT_IN_INF):
9074 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9075 case BUILT_IN_INFD32:
9076 case BUILT_IN_INFD64:
9077 case BUILT_IN_INFD128:
9078 return fold_builtin_inf (loc, type, true);
9080 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9081 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9082 return fold_builtin_inf (loc, type, false);
9084 case BUILT_IN_CLASSIFY_TYPE:
9085 return fold_builtin_classify_type (NULL_TREE);
9087 default:
9088 break;
9090 return NULL_TREE;
9093 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9094 This function returns NULL_TREE if no simplification was possible. */
9096 static tree
9097 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9099 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9100 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9102 if (TREE_CODE (arg0) == ERROR_MARK)
9103 return NULL_TREE;
9105 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9106 return ret;
9108 switch (fcode)
9110 case BUILT_IN_CONSTANT_P:
9112 tree val = fold_builtin_constant_p (arg0);
9114 /* Gimplification will pull the CALL_EXPR for the builtin out of
9115 an if condition. When not optimizing, we'll not CSE it back.
9116 To avoid link error types of regressions, return false now. */
9117 if (!val && !optimize)
9118 val = integer_zero_node;
9120 return val;
9123 case BUILT_IN_CLASSIFY_TYPE:
9124 return fold_builtin_classify_type (arg0);
9126 case BUILT_IN_STRLEN:
9127 return fold_builtin_strlen (loc, expr, type, arg0);
9129 CASE_FLT_FN (BUILT_IN_FABS):
9130 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9131 case BUILT_IN_FABSD32:
9132 case BUILT_IN_FABSD64:
9133 case BUILT_IN_FABSD128:
9134 return fold_builtin_fabs (loc, arg0, type);
9136 case BUILT_IN_ABS:
9137 case BUILT_IN_LABS:
9138 case BUILT_IN_LLABS:
9139 case BUILT_IN_IMAXABS:
9140 return fold_builtin_abs (loc, arg0, type);
9142 CASE_FLT_FN (BUILT_IN_CONJ):
9143 if (validate_arg (arg0, COMPLEX_TYPE)
9144 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9145 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9146 break;
9148 CASE_FLT_FN (BUILT_IN_CREAL):
9149 if (validate_arg (arg0, COMPLEX_TYPE)
9150 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9151 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9152 break;
9154 CASE_FLT_FN (BUILT_IN_CIMAG):
9155 if (validate_arg (arg0, COMPLEX_TYPE)
9156 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9157 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9158 break;
9160 CASE_FLT_FN (BUILT_IN_CARG):
9161 return fold_builtin_carg (loc, arg0, type);
9163 case BUILT_IN_ISASCII:
9164 return fold_builtin_isascii (loc, arg0);
9166 case BUILT_IN_TOASCII:
9167 return fold_builtin_toascii (loc, arg0);
9169 case BUILT_IN_ISDIGIT:
9170 return fold_builtin_isdigit (loc, arg0);
9172 CASE_FLT_FN (BUILT_IN_FINITE):
9173 case BUILT_IN_FINITED32:
9174 case BUILT_IN_FINITED64:
9175 case BUILT_IN_FINITED128:
9176 case BUILT_IN_ISFINITE:
9178 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9179 if (ret)
9180 return ret;
9181 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9184 CASE_FLT_FN (BUILT_IN_ISINF):
9185 case BUILT_IN_ISINFD32:
9186 case BUILT_IN_ISINFD64:
9187 case BUILT_IN_ISINFD128:
9189 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9190 if (ret)
9191 return ret;
9192 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9195 case BUILT_IN_ISNORMAL:
9196 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9198 case BUILT_IN_ISINF_SIGN:
9199 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9201 CASE_FLT_FN (BUILT_IN_ISNAN):
9202 case BUILT_IN_ISNAND32:
9203 case BUILT_IN_ISNAND64:
9204 case BUILT_IN_ISNAND128:
9205 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9207 case BUILT_IN_FREE:
9208 if (integer_zerop (arg0))
9209 return build_empty_stmt (loc);
9210 break;
9212 default:
9213 break;
9216 return NULL_TREE;
9220 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9221 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9222 if no simplification was possible. */
9224 static tree
9225 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9227 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9228 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9230 if (TREE_CODE (arg0) == ERROR_MARK
9231 || TREE_CODE (arg1) == ERROR_MARK)
9232 return NULL_TREE;
9234 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9235 return ret;
9237 switch (fcode)
9239 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9240 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9241 if (validate_arg (arg0, REAL_TYPE)
9242 && validate_arg (arg1, POINTER_TYPE))
9243 return do_mpfr_lgamma_r (arg0, arg1, type);
9244 break;
9246 CASE_FLT_FN (BUILT_IN_FREXP):
9247 return fold_builtin_frexp (loc, arg0, arg1, type);
9249 CASE_FLT_FN (BUILT_IN_MODF):
9250 return fold_builtin_modf (loc, arg0, arg1, type);
9252 case BUILT_IN_STRSPN:
9253 return fold_builtin_strspn (loc, expr, arg0, arg1);
9255 case BUILT_IN_STRCSPN:
9256 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9258 case BUILT_IN_STRPBRK:
9259 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9261 case BUILT_IN_EXPECT:
9262 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9264 case BUILT_IN_ISGREATER:
9265 return fold_builtin_unordered_cmp (loc, fndecl,
9266 arg0, arg1, UNLE_EXPR, LE_EXPR);
9267 case BUILT_IN_ISGREATEREQUAL:
9268 return fold_builtin_unordered_cmp (loc, fndecl,
9269 arg0, arg1, UNLT_EXPR, LT_EXPR);
9270 case BUILT_IN_ISLESS:
9271 return fold_builtin_unordered_cmp (loc, fndecl,
9272 arg0, arg1, UNGE_EXPR, GE_EXPR);
9273 case BUILT_IN_ISLESSEQUAL:
9274 return fold_builtin_unordered_cmp (loc, fndecl,
9275 arg0, arg1, UNGT_EXPR, GT_EXPR);
9276 case BUILT_IN_ISLESSGREATER:
9277 return fold_builtin_unordered_cmp (loc, fndecl,
9278 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9279 case BUILT_IN_ISUNORDERED:
9280 return fold_builtin_unordered_cmp (loc, fndecl,
9281 arg0, arg1, UNORDERED_EXPR,
9282 NOP_EXPR);
9284 /* We do the folding for va_start in the expander. */
9285 case BUILT_IN_VA_START:
9286 break;
9288 case BUILT_IN_OBJECT_SIZE:
9289 return fold_builtin_object_size (arg0, arg1);
9291 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9292 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9294 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9295 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9297 default:
9298 break;
9300 return NULL_TREE;
9303 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9304 and ARG2.
9305 This function returns NULL_TREE if no simplification was possible. */
9307 static tree
9308 fold_builtin_3 (location_t loc, tree fndecl,
9309 tree arg0, tree arg1, tree arg2)
9311 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9312 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9314 if (TREE_CODE (arg0) == ERROR_MARK
9315 || TREE_CODE (arg1) == ERROR_MARK
9316 || TREE_CODE (arg2) == ERROR_MARK)
9317 return NULL_TREE;
9319 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9320 arg0, arg1, arg2))
9321 return ret;
9323 switch (fcode)
9326 CASE_FLT_FN (BUILT_IN_SINCOS):
9327 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9329 CASE_FLT_FN (BUILT_IN_REMQUO):
9330 if (validate_arg (arg0, REAL_TYPE)
9331 && validate_arg (arg1, REAL_TYPE)
9332 && validate_arg (arg2, POINTER_TYPE))
9333 return do_mpfr_remquo (arg0, arg1, arg2);
9334 break;
9336 case BUILT_IN_MEMCMP:
9337 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9339 case BUILT_IN_EXPECT:
9340 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9342 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9343 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9345 case BUILT_IN_ADD_OVERFLOW:
9346 case BUILT_IN_SUB_OVERFLOW:
9347 case BUILT_IN_MUL_OVERFLOW:
9348 case BUILT_IN_ADD_OVERFLOW_P:
9349 case BUILT_IN_SUB_OVERFLOW_P:
9350 case BUILT_IN_MUL_OVERFLOW_P:
9351 case BUILT_IN_SADD_OVERFLOW:
9352 case BUILT_IN_SADDL_OVERFLOW:
9353 case BUILT_IN_SADDLL_OVERFLOW:
9354 case BUILT_IN_SSUB_OVERFLOW:
9355 case BUILT_IN_SSUBL_OVERFLOW:
9356 case BUILT_IN_SSUBLL_OVERFLOW:
9357 case BUILT_IN_SMUL_OVERFLOW:
9358 case BUILT_IN_SMULL_OVERFLOW:
9359 case BUILT_IN_SMULLL_OVERFLOW:
9360 case BUILT_IN_UADD_OVERFLOW:
9361 case BUILT_IN_UADDL_OVERFLOW:
9362 case BUILT_IN_UADDLL_OVERFLOW:
9363 case BUILT_IN_USUB_OVERFLOW:
9364 case BUILT_IN_USUBL_OVERFLOW:
9365 case BUILT_IN_USUBLL_OVERFLOW:
9366 case BUILT_IN_UMUL_OVERFLOW:
9367 case BUILT_IN_UMULL_OVERFLOW:
9368 case BUILT_IN_UMULLL_OVERFLOW:
9369 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9371 default:
9372 break;
9374 return NULL_TREE;
9377 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9378 ARGS is an array of NARGS arguments. IGNORE is true if the result
9379 of the function call is ignored. This function returns NULL_TREE
9380 if no simplification was possible. */
9382 static tree
9383 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9384 int nargs, bool)
9386 tree ret = NULL_TREE;
9388 switch (nargs)
9390 case 0:
9391 ret = fold_builtin_0 (loc, fndecl);
9392 break;
9393 case 1:
9394 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9395 break;
9396 case 2:
9397 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9398 break;
9399 case 3:
9400 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9401 break;
9402 default:
9403 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9404 break;
9406 if (ret)
9408 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9409 SET_EXPR_LOCATION (ret, loc);
9410 return ret;
9412 return NULL_TREE;
9415 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9416 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9417 of arguments in ARGS to be omitted. OLDNARGS is the number of
9418 elements in ARGS. */
9420 static tree
9421 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9422 int skip, tree fndecl, int n, va_list newargs)
9424 int nargs = oldnargs - skip + n;
9425 tree *buffer;
9427 if (n > 0)
9429 int i, j;
9431 buffer = XALLOCAVEC (tree, nargs);
9432 for (i = 0; i < n; i++)
9433 buffer[i] = va_arg (newargs, tree);
9434 for (j = skip; j < oldnargs; j++, i++)
9435 buffer[i] = args[j];
9437 else
9438 buffer = args + skip;
9440 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9443 /* Return true if FNDECL shouldn't be folded right now.
9444 If a built-in function has an inline attribute always_inline
9445 wrapper, defer folding it until after always_inline functions have
9446 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9447 might not be performed. */
9449 bool
9450 avoid_folding_inline_builtin (tree fndecl)
9452 return (DECL_DECLARED_INLINE_P (fndecl)
9453 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9454 && cfun
9455 && !cfun->always_inline_functions_inlined
9456 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9459 /* A wrapper function for builtin folding that prevents warnings for
9460 "statement without effect" and the like, caused by removing the
9461 call node earlier than the warning is generated. */
9463 tree
9464 fold_call_expr (location_t loc, tree exp, bool ignore)
9466 tree ret = NULL_TREE;
9467 tree fndecl = get_callee_fndecl (exp);
9468 if (fndecl && fndecl_built_in_p (fndecl)
9469 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9470 yet. Defer folding until we see all the arguments
9471 (after inlining). */
9472 && !CALL_EXPR_VA_ARG_PACK (exp))
9474 int nargs = call_expr_nargs (exp);
9476 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9477 instead the last argument is __builtin_va_arg_pack (). Defer folding
9478 even in that case, until arguments are finalized. */
9479 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9481 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9482 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9483 return NULL_TREE;
9486 if (avoid_folding_inline_builtin (fndecl))
9487 return NULL_TREE;
9489 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9490 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9491 CALL_EXPR_ARGP (exp), ignore);
9492 else
9494 tree *args = CALL_EXPR_ARGP (exp);
9495 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9496 if (ret)
9497 return ret;
9500 return NULL_TREE;
9503 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9504 N arguments are passed in the array ARGARRAY. Return a folded
9505 expression or NULL_TREE if no simplification was possible. */
9507 tree
9508 fold_builtin_call_array (location_t loc, tree,
9509 tree fn,
9510 int n,
9511 tree *argarray)
9513 if (TREE_CODE (fn) != ADDR_EXPR)
9514 return NULL_TREE;
9516 tree fndecl = TREE_OPERAND (fn, 0);
9517 if (TREE_CODE (fndecl) == FUNCTION_DECL
9518 && fndecl_built_in_p (fndecl))
9520 /* If last argument is __builtin_va_arg_pack (), arguments to this
9521 function are not finalized yet. Defer folding until they are. */
9522 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9524 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9525 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9526 return NULL_TREE;
9528 if (avoid_folding_inline_builtin (fndecl))
9529 return NULL_TREE;
9530 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9531 return targetm.fold_builtin (fndecl, n, argarray, false);
9532 else
9533 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9536 return NULL_TREE;
9539 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9540 along with N new arguments specified as the "..." parameters. SKIP
9541 is the number of arguments in EXP to be omitted. This function is used
9542 to do varargs-to-varargs transformations. */
9544 static tree
9545 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9547 va_list ap;
9548 tree t;
9550 va_start (ap, n);
9551 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9552 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9553 va_end (ap);
9555 return t;
9558 /* Validate a single argument ARG against a tree code CODE representing
9559 a type. Return true when argument is valid. */
9561 static bool
9562 validate_arg (const_tree arg, enum tree_code code)
9564 if (!arg)
9565 return false;
9566 else if (code == POINTER_TYPE)
9567 return POINTER_TYPE_P (TREE_TYPE (arg));
9568 else if (code == INTEGER_TYPE)
9569 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9570 return code == TREE_CODE (TREE_TYPE (arg));
9573 /* This function validates the types of a function call argument list
9574 against a specified list of tree_codes. If the last specifier is a 0,
9575 that represents an ellipsis; otherwise the last specifier must be a
9576 VOID_TYPE.
9578 This is the GIMPLE version of validate_arglist. Eventually we want to
9579 completely convert builtins.c to work from GIMPLEs and the tree based
9580 validate_arglist will then be removed. */
9582 bool
9583 validate_gimple_arglist (const gcall *call, ...)
9585 enum tree_code code;
9586 bool res = 0;
9587 va_list ap;
9588 const_tree arg;
9589 size_t i;
9591 va_start (ap, call);
9592 i = 0;
9594 do
9596 code = (enum tree_code) va_arg (ap, int);
9597 switch (code)
9599 case 0:
9600 /* This signifies an ellipsis; any further arguments are all ok. */
9601 res = true;
9602 goto end;
9603 case VOID_TYPE:
9604 /* This signifies an endlink, if no arguments remain, return
9605 true, otherwise return false. */
9606 res = (i == gimple_call_num_args (call));
9607 goto end;
9608 default:
9609 /* If no parameters remain or the parameter's code does not
9610 match the specified code, return false. Otherwise continue
9611 checking any remaining arguments. */
9612 arg = gimple_call_arg (call, i++);
9613 if (!validate_arg (arg, code))
9614 goto end;
9615 break;
9618 while (1);
9620 /* We need gotos here since we can only have one VA_CLOSE in a
9621 function. */
9622 end: ;
9623 va_end (ap);
9625 return res;
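/* A hypothetical usage sketch (not part of this file): callers spell out
   the expected signature and terminate it with VOID_TYPE, or with 0 to
   accept arbitrary trailing arguments.  */

static bool
example_check_memcpy_args (const gcall *call)
{
  /* True only for a (pointer, pointer, integer) argument list.  */
  return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                  INTEGER_TYPE, VOID_TYPE);
}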
9628 /* Default target-specific builtin expander that does nothing. */
9630 rtx
9631 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9632 rtx target ATTRIBUTE_UNUSED,
9633 rtx subtarget ATTRIBUTE_UNUSED,
9634 machine_mode mode ATTRIBUTE_UNUSED,
9635 int ignore ATTRIBUTE_UNUSED)
9637 return NULL_RTX;
9640 /* Returns true if EXP represents data that would potentially reside
9641 in a readonly section. */
9643 bool
9644 readonly_data_expr (tree exp)
9646 STRIP_NOPS (exp);
9648 if (TREE_CODE (exp) != ADDR_EXPR)
9649 return false;
9651 exp = get_base_address (TREE_OPERAND (exp, 0));
9652 if (!exp)
9653 return false;
9655 /* Make sure we call decl_readonly_section only for trees it
9656 can handle (since it returns true for everything it doesn't
9657 understand). */
9658 if (TREE_CODE (exp) == STRING_CST
9659 || TREE_CODE (exp) == CONSTRUCTOR
9660 || (VAR_P (exp) && TREE_STATIC (exp)))
9661 return decl_readonly_section (exp, 0);
9662 else
9663 return false;
9666 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9667 to the call, and TYPE is its return type.
9669 Return NULL_TREE if no simplification was possible, otherwise return the
9670 simplified form of the call as a tree.
9672 The simplified form may be a constant or other expression which
9673 computes the same value, but in a more efficient manner (including
9674 calls to other builtin functions).
9676 The call may contain arguments which need to be evaluated, but
9677 which are not useful to determine the result of the call. In
9678 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9679 COMPOUND_EXPR will be an argument which must be evaluated.
9680 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9681 COMPOUND_EXPR in the chain will contain the tree for the simplified
9682 form of the builtin function call. */
9684 static tree
9685 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9687 if (!validate_arg (s1, POINTER_TYPE)
9688 || !validate_arg (s2, POINTER_TYPE))
9689 return NULL_TREE;
9691 tree fn;
9692 const char *p1, *p2;
9694 p2 = c_getstr (s2);
9695 if (p2 == NULL)
9696 return NULL_TREE;
9698 p1 = c_getstr (s1);
9699 if (p1 != NULL)
9701 const char *r = strpbrk (p1, p2);
9702 tree tem;
9704 if (r == NULL)
9705 return build_int_cst (TREE_TYPE (s1), 0);
9707 /* Return an offset into the constant string argument. */
9708 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9709 return fold_convert_loc (loc, type, tem);
9712 if (p2[0] == '\0')
9713 /* strpbrk(x, "") == NULL.
9714 Evaluate and ignore s1 in case it had side-effects. */
9715 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9717 if (p2[1] != '\0')
9718 return NULL_TREE; /* Really call strpbrk. */
9720 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9721 if (!fn)
9722 return NULL_TREE;
9724 /* New argument list transforming strpbrk(s1, s2) to
9725 strchr(s1, s2[0]). */
9726 return build_call_expr_loc (loc, fn, 2, s1,
9727 build_int_cst (integer_type_node, p2[0]));
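/* A hypothetical caller-side sketch (not part of this file): with a
   single-character set the call is rewritten to strchr.  */

static char *
example_strpbrk (char *s)
{
  return __builtin_strpbrk (s, "/");   /* folds to __builtin_strchr (s, '/') */
}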
9730 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9731 to the call.
9733 Return NULL_TREE if no simplification was possible, otherwise return the
9734 simplified form of the call as a tree.
9736 The simplified form may be a constant or other expression which
9737 computes the same value, but in a more efficient manner (including
9738 calls to other builtin functions).
9740 The call may contain arguments which need to be evaluated, but
9741 which are not useful to determine the result of the call. In
9742 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9743 COMPOUND_EXPR will be an argument which must be evaluated.
9744 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9745 COMPOUND_EXPR in the chain will contain the tree for the simplified
9746 form of the builtin function call. */
9748 static tree
9749 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9751 if (!validate_arg (s1, POINTER_TYPE)
9752 || !validate_arg (s2, POINTER_TYPE))
9753 return NULL_TREE;
9755 if (!check_nul_terminated_array (expr, s1)
9756 || !check_nul_terminated_array (expr, s2))
9757 return NULL_TREE;
9759 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9761 /* If either argument is "", return NULL_TREE. */
9762 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9763 /* Evaluate and ignore both arguments in case either one has
9764 side-effects. */
9765 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9766 s1, s2);
9767 return NULL_TREE;
9770 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9771 to the call.
9773 Return NULL_TREE if no simplification was possible, otherwise return the
9774 simplified form of the call as a tree.
9776 The simplified form may be a constant or other expression which
9777 computes the same value, but in a more efficient manner (including
9778 calls to other builtin functions).
9780 The call may contain arguments which need to be evaluated, but
9781 which are not useful to determine the result of the call. In
9782 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9783 COMPOUND_EXPR will be an argument which must be evaluated.
9784 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9785 COMPOUND_EXPR in the chain will contain the tree for the simplified
9786 form of the builtin function call. */
9788 static tree
9789 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9791 if (!validate_arg (s1, POINTER_TYPE)
9792 || !validate_arg (s2, POINTER_TYPE))
9793 return NULL_TREE;
9795 if (!check_nul_terminated_array (expr, s1)
9796 || !check_nul_terminated_array (expr, s2))
9797 return NULL_TREE;
9799 /* If the first argument is "", return NULL_TREE. */
9800 const char *p1 = c_getstr (s1);
9801 if (p1 && *p1 == '\0')
9803 /* Evaluate and ignore argument s2 in case it has
9804 side-effects. */
9805 return omit_one_operand_loc (loc, size_type_node,
9806 size_zero_node, s2);
9809 /* If the second argument is "", return __builtin_strlen(s1). */
9810 const char *p2 = c_getstr (s2);
9811 if (p2 && *p2 == '\0')
9813 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9815 /* If the replacement _DECL isn't initialized, don't do the
9816 transformation. */
9817 if (!fn)
9818 return NULL_TREE;
9820 return build_call_expr_loc (loc, fn, 1, s1);
9822 return NULL_TREE;
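/* A hypothetical caller-side sketch (not part of this file): the two
   trivial cases handled above.  */

static __SIZE_TYPE__
example_strcspn (const char *s)
{
  /* No character of S occurs in the empty reject set, so this folds to
     __builtin_strlen (s); with the arguments swapped it folds to 0.  */
  return __builtin_strcspn (s, "");
}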
9825 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9826 produced. False otherwise. This is done so that we don't output the error
9827 or warning twice or three times. */
9829 bool
9830 fold_builtin_next_arg (tree exp, bool va_start_p)
9832 tree fntype = TREE_TYPE (current_function_decl);
9833 int nargs = call_expr_nargs (exp);
9834 tree arg;
9835 /* There is a good chance the current input_location points inside the
9836 definition of the va_start macro (perhaps on the token for
9837 builtin) in a system header, so warnings will not be emitted.
9838 Use the location in real source code. */
9839 location_t current_location =
9840 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9841 NULL);
9843 if (!stdarg_p (fntype))
9845 error ("%<va_start%> used in function with fixed arguments");
9846 return true;
9849 if (va_start_p)
9851 if (va_start_p && (nargs != 2))
9853 error ("wrong number of arguments to function %<va_start%>");
9854 return true;
9856 arg = CALL_EXPR_ARG (exp, 1);
9858 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9859 when we checked the arguments and if needed issued a warning. */
9860 else
9862 if (nargs == 0)
9864 /* Evidently an out of date version of <stdarg.h>; can't validate
9865 va_start's second argument, but can still work as intended. */
9866 warning_at (current_location,
9867 OPT_Wvarargs,
9868 "%<__builtin_next_arg%> called without an argument");
9869 return true;
9871 else if (nargs > 1)
9873 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9874 return true;
9876 arg = CALL_EXPR_ARG (exp, 0);
9879 if (TREE_CODE (arg) == SSA_NAME
9880 && SSA_NAME_VAR (arg))
9881 arg = SSA_NAME_VAR (arg);
9883 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9884 or __builtin_next_arg (0) the first time we see it, after checking
9885 the arguments and if needed issuing a warning. */
9886 if (!integer_zerop (arg))
9888 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9890 /* Strip off all nops for the sake of the comparison. This
9891 is not quite the same as STRIP_NOPS. It does more.
9892 We must also strip off INDIRECT_EXPR for C++ reference
9893 parameters. */
9894 while (CONVERT_EXPR_P (arg)
9895 || TREE_CODE (arg) == INDIRECT_REF)
9896 arg = TREE_OPERAND (arg, 0);
9897 if (arg != last_parm)
9899 /* FIXME: Sometimes with the tree optimizers we can get something
9900 other than the last argument even though the user used the last
9901 argument. We just warn and set the arg to be the last
9902 argument so that we will get wrong-code because of
9903 it. */
9904 warning_at (current_location,
9905 OPT_Wvarargs,
9906 "second parameter of %<va_start%> not last named argument");
9909 /* Undefined by C99 7.15.1.4p4 (va_start):
9910 "If the parameter parmN is declared with the register storage
9911 class, with a function or array type, or with a type that is
9912 not compatible with the type that results after application of
9913 the default argument promotions, the behavior is undefined."
9915 else if (DECL_REGISTER (arg))
9917 warning_at (current_location,
9918 OPT_Wvarargs,
9919 "undefined behavior when second parameter of "
9920 "%<va_start%> is declared with %<register%> storage");
9923 /* We want to verify the second parameter just once before the tree
9924 optimizers are run and then avoid keeping it in the tree,
9925 as otherwise we could warn even for correct code like:
9926 void foo (int i, ...)
9927 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9928 if (va_start_p)
9929 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9930 else
9931 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9933 return false;
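/* A hypothetical caller-side sketch (not part of this file): the check
   above verifies that the second va_start argument is the last named
   parameter and then replaces it with 0 so the verification happens only
   once.  */

static int
example_sum (int count, ...)
{
  __builtin_va_list ap;
  int i, sum = 0;

  __builtin_va_start (ap, count);   /* COUNT must be the last named arg.  */
  for (i = 0; i < count; i++)
    sum += __builtin_va_arg (ap, int);
  __builtin_va_end (ap);
  return sum;
}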
9937 /* Expand a call EXP to __builtin_object_size. */
9939 static rtx
9940 expand_builtin_object_size (tree exp)
9942 tree ost;
9943 int object_size_type;
9944 tree fndecl = get_callee_fndecl (exp);
9946 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9948 error ("first argument of %qD must be a pointer, second integer constant",
9949 fndecl);
9950 expand_builtin_trap ();
9951 return const0_rtx;
9954 ost = CALL_EXPR_ARG (exp, 1);
9955 STRIP_NOPS (ost);
9957 if (TREE_CODE (ost) != INTEGER_CST
9958 || tree_int_cst_sgn (ost) < 0
9959 || compare_tree_int (ost, 3) > 0)
9961 error ("last argument of %qD is not integer constant between 0 and 3",
9962 fndecl);
9963 expand_builtin_trap ();
9964 return const0_rtx;
9967 object_size_type = tree_to_shwi (ost);
9969 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9972 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9973 FCODE is the BUILT_IN_* to use.
9974 Return NULL_RTX if we failed; the caller should emit a normal call,
9975 otherwise try to get the result in TARGET, if convenient (and in
9976 mode MODE if that's convenient). */
9978 static rtx
9979 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9980 enum built_in_function fcode)
9982 if (!validate_arglist (exp,
9983 POINTER_TYPE,
9984 fcode == BUILT_IN_MEMSET_CHK
9985 ? INTEGER_TYPE : POINTER_TYPE,
9986 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9987 return NULL_RTX;
9989 tree dest = CALL_EXPR_ARG (exp, 0);
9990 tree src = CALL_EXPR_ARG (exp, 1);
9991 tree len = CALL_EXPR_ARG (exp, 2);
9992 tree size = CALL_EXPR_ARG (exp, 3);
9994 /* FIXME: Set access mode to write only for memset et al. */
9995 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
9996 /*srcstr=*/NULL_TREE, size, access_read_write);
9998 if (!tree_fits_uhwi_p (size))
9999 return NULL_RTX;
10001 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10003 /* Avoid transforming the checking call to an ordinary one when
10004 an overflow has been detected or when the call couldn't be
10005 validated because the size is not constant. */
10006 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10007 return NULL_RTX;
10009 tree fn = NULL_TREE;
10010 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10011 mem{cpy,pcpy,move,set} is available. */
10012 switch (fcode)
10014 case BUILT_IN_MEMCPY_CHK:
10015 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10016 break;
10017 case BUILT_IN_MEMPCPY_CHK:
10018 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10019 break;
10020 case BUILT_IN_MEMMOVE_CHK:
10021 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10022 break;
10023 case BUILT_IN_MEMSET_CHK:
10024 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10025 break;
10026 default:
10027 break;
10030 if (! fn)
10031 return NULL_RTX;
10033 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10034 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10035 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10036 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10038 else if (fcode == BUILT_IN_MEMSET_CHK)
10039 return NULL_RTX;
10040 else
10042 unsigned int dest_align = get_pointer_alignment (dest);
10044 /* If DEST is not a pointer type, call the normal function. */
10045 if (dest_align == 0)
10046 return NULL_RTX;
10048 /* If SRC and DEST are the same (and not volatile), do nothing. */
10049 if (operand_equal_p (src, dest, 0))
10051 tree expr;
10053 if (fcode != BUILT_IN_MEMPCPY_CHK)
10055 /* Evaluate and ignore LEN in case it has side-effects. */
10056 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10057 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10060 expr = fold_build_pointer_plus (dest, len);
10061 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10064 /* __memmove_chk special case. */
10065 if (fcode == BUILT_IN_MEMMOVE_CHK)
10067 unsigned int src_align = get_pointer_alignment (src);
10069 if (src_align == 0)
10070 return NULL_RTX;
10072 /* If src is categorized for a readonly section we can use
10073 normal __memcpy_chk. */
10074 if (readonly_data_expr (src))
10076 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10077 if (!fn)
10078 return NULL_RTX;
10079 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10080 dest, src, len, size);
10081 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10082 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10083 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10086 return NULL_RTX;
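/* A hypothetical caller-side sketch (not part of this file): the _chk entry
   points carry the destination object size as a trailing argument; when the
   length is known to fit, or the size is (size_t) -1 (unknown), the call is
   expanded as the plain mem* function.  */

static void *
example_memcpy_chk (void *d, const void *s, __SIZE_TYPE__ n)
{
  return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
}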
10090 /* Emit warning if a buffer overflow is detected at compile time. */
10092 static void
10093 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10095 /* The source string. */
10096 tree srcstr = NULL_TREE;
10097 /* The size of the destination object returned by __builtin_object_size. */
10098 tree objsize = NULL_TREE;
10099 /* The string that is being concatenated with (as in __strcat_chk)
10100 or null if it isn't. */
10101 tree catstr = NULL_TREE;
10102 /* The maximum length of the source sequence in a bounded operation
10103 (such as __strncat_chk) or null if the operation isn't bounded
10104 (such as __strcat_chk). */
10105 tree maxread = NULL_TREE;
10106 /* The exact size of the access (such as in __strncpy_chk). */
10107 tree size = NULL_TREE;
10108 /* The access by the function that's checked. Except for snprintf
10109 both writing and reading are checked. */
10110 access_mode mode = access_read_write;
10112 switch (fcode)
10114 case BUILT_IN_STRCPY_CHK:
10115 case BUILT_IN_STPCPY_CHK:
10116 srcstr = CALL_EXPR_ARG (exp, 1);
10117 objsize = CALL_EXPR_ARG (exp, 2);
10118 break;
10120 case BUILT_IN_STRCAT_CHK:
10121 /* For __strcat_chk the warning will be emitted only if overflowing
10122 by at least strlen (dest) + 1 bytes. */
10123 catstr = CALL_EXPR_ARG (exp, 0);
10124 srcstr = CALL_EXPR_ARG (exp, 1);
10125 objsize = CALL_EXPR_ARG (exp, 2);
10126 break;
10128 case BUILT_IN_STRNCAT_CHK:
10129 catstr = CALL_EXPR_ARG (exp, 0);
10130 srcstr = CALL_EXPR_ARG (exp, 1);
10131 maxread = CALL_EXPR_ARG (exp, 2);
10132 objsize = CALL_EXPR_ARG (exp, 3);
10133 break;
10135 case BUILT_IN_STRNCPY_CHK:
10136 case BUILT_IN_STPNCPY_CHK:
10137 srcstr = CALL_EXPR_ARG (exp, 1);
10138 size = CALL_EXPR_ARG (exp, 2);
10139 objsize = CALL_EXPR_ARG (exp, 3);
10140 break;
10142 case BUILT_IN_SNPRINTF_CHK:
10143 case BUILT_IN_VSNPRINTF_CHK:
10144 maxread = CALL_EXPR_ARG (exp, 1);
10145 objsize = CALL_EXPR_ARG (exp, 3);
10146 /* The only checked access is the write to the destination. */
10147 mode = access_write_only;
10148 break;
10149 default:
10150 gcc_unreachable ();
10153 if (catstr && maxread)
10155 /* Check __strncat_chk. There is no way to determine the length
10156 of the string to which the source string is being appended so
10157 just warn when the length of the source string is not known. */
10158 check_strncat_sizes (exp, objsize);
10159 return;
10162 check_access (exp, size, maxread, srcstr, objsize, mode);
10165 /* Emit warning if a buffer overflow is detected at compile time
10166 in __sprintf_chk/__vsprintf_chk calls. */
10168 static void
10169 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10171 tree size, len, fmt;
10172 const char *fmt_str;
10173 int nargs = call_expr_nargs (exp);
10175 /* Verify the required arguments in the original call. */
10177 if (nargs < 4)
10178 return;
10179 size = CALL_EXPR_ARG (exp, 2);
10180 fmt = CALL_EXPR_ARG (exp, 3);
10182 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10183 return;
10185 /* Check whether the format is a literal string constant. */
10186 fmt_str = c_getstr (fmt);
10187 if (fmt_str == NULL)
10188 return;
10190 if (!init_target_chars ())
10191 return;
10193 /* If the format doesn't contain % args or %%, we know its size. */
10194 if (strchr (fmt_str, target_percent) == 0)
10195 len = build_int_cstu (size_type_node, strlen (fmt_str));
10196 /* If the format is "%s" and first ... argument is a string literal,
10197 we know it too. */
10198 else if (fcode == BUILT_IN_SPRINTF_CHK
10199 && strcmp (fmt_str, target_percent_s) == 0)
10201 tree arg;
10203 if (nargs < 5)
10204 return;
10205 arg = CALL_EXPR_ARG (exp, 4);
10206 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10207 return;
10209 len = c_strlen (arg, 1);
10210 if (!len || ! tree_fits_uhwi_p (len))
10211 return;
10213 else
10214 return;
10216 /* Add one for the terminating nul. */
10217 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10219 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10220 access_write_only);
10223 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10224 if possible. */
10226 static tree
10227 fold_builtin_object_size (tree ptr, tree ost)
10229 unsigned HOST_WIDE_INT bytes;
10230 int object_size_type;
10232 if (!validate_arg (ptr, POINTER_TYPE)
10233 || !validate_arg (ost, INTEGER_TYPE))
10234 return NULL_TREE;
10236 STRIP_NOPS (ost);
10238 if (TREE_CODE (ost) != INTEGER_CST
10239 || tree_int_cst_sgn (ost) < 0
10240 || compare_tree_int (ost, 3) > 0)
10241 return NULL_TREE;
10243 object_size_type = tree_to_shwi (ost);
10245 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10246 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10247 and (size_t) 0 for types 2 and 3. */
10248 if (TREE_SIDE_EFFECTS (ptr))
10249 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10251 if (TREE_CODE (ptr) == ADDR_EXPR)
10253 compute_builtin_object_size (ptr, object_size_type, &bytes);
10254 if (wi::fits_to_tree_p (bytes, size_type_node))
10255 return build_int_cstu (size_type_node, bytes);
10257 else if (TREE_CODE (ptr) == SSA_NAME)
10259 /* If object size is not known yet, delay folding until
10260 later. Maybe subsequent passes will help determine
10261 it. */
10262 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10263 && wi::fits_to_tree_p (bytes, size_type_node))
10264 return build_int_cstu (size_type_node, bytes);
10267 return NULL_TREE;
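/* A hypothetical caller-side sketch (not part of this file): for an
   ADDR_EXPR into an object of known size the call folds to a constant;
   when nothing is known, types 0 and 1 fold to (size_t) -1 and types 2
   and 3 fold to 0.  */

static __SIZE_TYPE__
example_object_size (void)
{
  static char buf[16];
  return __builtin_object_size (&buf[4], 0);   /* folds to 12 */
}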
10270 /* Builtins with folding operations that operate on "..." arguments
10271 need special handling; we need to store the arguments in a convenient
10272 data structure before attempting any folding. Fortunately there are
10273 only a few builtins that fall into this category. FNDECL is the
10274 function, EXP is the CALL_EXPR for the call. */
10276 static tree
10277 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10279 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10280 tree ret = NULL_TREE;
10282 switch (fcode)
10284 case BUILT_IN_FPCLASSIFY:
10285 ret = fold_builtin_fpclassify (loc, args, nargs);
10286 break;
10288 default:
10289 break;
10291 if (ret)
10293 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10294 SET_EXPR_LOCATION (ret, loc);
10295 suppress_warning (ret);
10296 return ret;
10298 return NULL_TREE;
10301 /* Initialize format string characters in the target charset. */
10303 bool
10304 init_target_chars (void)
10306 static bool init;
10307 if (!init)
10309 target_newline = lang_hooks.to_target_charset ('\n');
10310 target_percent = lang_hooks.to_target_charset ('%');
10311 target_c = lang_hooks.to_target_charset ('c');
10312 target_s = lang_hooks.to_target_charset ('s');
10313 if (target_newline == 0 || target_percent == 0 || target_c == 0
10314 || target_s == 0)
10315 return false;
10317 target_percent_c[0] = target_percent;
10318 target_percent_c[1] = target_c;
10319 target_percent_c[2] = '\0';
10321 target_percent_s[0] = target_percent;
10322 target_percent_s[1] = target_s;
10323 target_percent_s[2] = '\0';
10325 target_percent_s_newline[0] = target_percent;
10326 target_percent_s_newline[1] = target_s;
10327 target_percent_s_newline[2] = target_newline;
10328 target_percent_s_newline[3] = '\0';
10330 init = true;
10332 return true;
10335 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10336 and no overflow/underflow occurred. INEXACT is true if M was not
10337 exactly calculated. TYPE is the tree type for the result. This
10338 function assumes that you cleared the MPFR flags and then
10339 calculated M to see if anything subsequently set a flag prior to
10340 entering this function. Return NULL_TREE if any checks fail. */
10342 static tree
10343 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10345 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10346 overflow/underflow occurred. If -frounding-math, proceed iff the
10347 result of calling FUNC was exact. */
10348 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10349 && (!flag_rounding_math || !inexact))
10351 REAL_VALUE_TYPE rr;
10353 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10354 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10355 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10356 but the mpfr_t is not, then we underflowed in the
10357 conversion. */
10358 if (real_isfinite (&rr)
10359 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10361 REAL_VALUE_TYPE rmode;
10363 real_convert (&rmode, TYPE_MODE (type), &rr);
10364 /* Proceed iff the specified mode can hold the value. */
10365 if (real_identical (&rmode, &rr))
10366 return build_real (type, rmode);
10369 return NULL_TREE;
10372 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10373 number and no overflow/underflow occurred. INEXACT is true if M
10374 was not exactly calculated. TYPE is the tree type for the result.
10375 This function assumes that you cleared the MPFR flags and then
10376 calculated M to see if anything subsequently set a flag prior to
10377 entering this function. Return NULL_TREE if any checks fail, if
10378 FORCE_CONVERT is true, then bypass the checks. */
10380 static tree
10381 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10383 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10384 overflow/underflow occurred. If -frounding-math, proceed iff the
10385 result of calling FUNC was exact. */
10386 if (force_convert
10387 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10388 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10389 && (!flag_rounding_math || !inexact)))
10391 REAL_VALUE_TYPE re, im;
10393 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10394 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10395 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10396 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10398 but the mpfr_t is not, then we underflowed in the
10398 conversion. */
10399 if (force_convert
10400 || (real_isfinite (&re) && real_isfinite (&im)
10401 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10402 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10404 REAL_VALUE_TYPE re_mode, im_mode;
10406 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10407 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10408 /* Proceed iff the specified mode can hold the value. */
10409 if (force_convert
10410 || (real_identical (&re_mode, &re)
10411 && real_identical (&im_mode, &im)))
10412 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10413 build_real (TREE_TYPE (type), im_mode));
10416 return NULL_TREE;
10419 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10420 the pointer *(ARG_QUO) and return the result. The type is taken
10421 from the type of ARG0 and is used for setting the precision of the
10422 calculation and results. */
10424 static tree
10425 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10427 tree const type = TREE_TYPE (arg0);
10428 tree result = NULL_TREE;
10430 STRIP_NOPS (arg0);
10431 STRIP_NOPS (arg1);
10433 /* To proceed, MPFR must exactly represent the target floating point
10434 format, which only happens when the target base equals two. */
10435 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10436 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10437 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10439 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10440 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10442 if (real_isfinite (ra0) && real_isfinite (ra1))
10444 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10445 const int prec = fmt->p;
10446 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10447 tree result_rem;
10448 long integer_quo;
10449 mpfr_t m0, m1;
10451 mpfr_inits2 (prec, m0, m1, NULL);
10452 mpfr_from_real (m0, ra0, MPFR_RNDN);
10453 mpfr_from_real (m1, ra1, MPFR_RNDN);
10454 mpfr_clear_flags ();
10455 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10456 /* Remquo is independent of the rounding mode, so pass
10457 inexact=0 to do_mpfr_ckconv(). */
10458 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10459 mpfr_clears (m0, m1, NULL);
10460 if (result_rem)
10462 /* MPFR calculates quo in the host's long so it may
10463 return more bits in quo than the target int can hold
10464 if sizeof(host long) > sizeof(target int). This can
10465 happen even for native compilers in LP64 mode. In
10466 these cases, modulo the quo value with the largest
10467 number that the target int can hold while leaving one
10468 bit for the sign. */
10469 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10470 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10472 /* Dereference the quo pointer argument. */
10473 arg_quo = build_fold_indirect_ref (arg_quo);
10474 /* Proceed iff a valid pointer type was passed in. */
10475 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10477 /* Set the value. */
10478 tree result_quo
10479 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10480 build_int_cst (TREE_TYPE (arg_quo),
10481 integer_quo));
10482 TREE_SIDE_EFFECTS (result_quo) = 1;
10483 /* Combine the quo assignment with the rem. */
10484 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10485 result_quo, result_rem));
10490 return result;
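/* A hypothetical caller-side sketch (not part of this file): with constant
   operands the IEEE remainder and the low bits of the quotient can be
   computed at compile time via MPFR.  */

static double
example_remquo (int *quo)
{
  return __builtin_remquo (5.0, 3.0, quo);   /* folds to -1.0, *quo = 2 */
}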
10493 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10494 resulting value as a tree with type TYPE. The mpfr precision is
10495 set to the precision of TYPE. We assume that this mpfr function
10496 returns zero if the result could be calculated exactly within the
10497 requested precision. In addition, the integer pointer represented
10498 by ARG_SG will be dereferenced and set to the appropriate signgam
10499 (-1,1) value. */
10501 static tree
10502 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10504 tree result = NULL_TREE;
10506 STRIP_NOPS (arg);
10508 /* To proceed, MPFR must exactly represent the target floating point
10509 format, which only happens when the target base equals two. Also
10510 verify ARG is a constant and that ARG_SG is an int pointer. */
10511 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10512 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10513 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10514 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10516 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10518 /* In addition to NaN and Inf, the argument cannot be zero or a
10519 negative integer. */
10520 if (real_isfinite (ra)
10521 && ra->cl != rvc_zero
10522 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10524 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10525 const int prec = fmt->p;
10526 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10527 int inexact, sg;
10528 mpfr_t m;
10529 tree result_lg;
10531 mpfr_init2 (m, prec);
10532 mpfr_from_real (m, ra, MPFR_RNDN);
10533 mpfr_clear_flags ();
10534 inexact = mpfr_lgamma (m, &sg, m, rnd);
10535 result_lg = do_mpfr_ckconv (m, type, inexact);
10536 mpfr_clear (m);
10537 if (result_lg)
10539 tree result_sg;
10541 /* Dereference the arg_sg pointer argument. */
10542 arg_sg = build_fold_indirect_ref (arg_sg);
10543 /* Assign the signgam value into *arg_sg. */
10544 result_sg = fold_build2 (MODIFY_EXPR,
10545 TREE_TYPE (arg_sg), arg_sg,
10546 build_int_cst (TREE_TYPE (arg_sg), sg));
10547 TREE_SIDE_EFFECTS (result_sg) = 1;
10548 /* Combine the signgam assignment with the lgamma result. */
10549 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10550 result_sg, result_lg));
10555 return result;

/* If arguments ARG0 and ARG1 are both COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mpfr_rnd_t rnd = fmt->round_towards_zero
                                 ? MPFR_RNDZ : MPFR_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
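
/* A minimal usage sketch (illustrative; not a verbatim quote of the
   actual call site): folding a cpow call with constant complex operands
   passes mpc_pow as FUNC, roughly

     result = do_mpc_arg2 (arg0, arg1, type, do_nonfinite, mpc_pow);

   and gets NULL_TREE back whenever the operands are not COMPLEX_CSTs or
   do_mpc_ckconv cannot verify that the result converts exactly.  */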

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   If so, store the cast char constant in *P.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
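
/* A typical use (a sketch, not a quote of an actual caller): a folder
   for strchr (s, c) with constant C first narrows the argument to a
   host char,

     char c;
     if (!target_char_cst_p (arg1, &c))
       return NULL_TREE;

   and only then searches the string constant on the host.  */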

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee it is not (thus the
   list of handled builtins below may be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
        return true;

      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
        /* stpcpy is both referenced in libiberty's pex-win32.c and provided
           by libiberty's stpcpy.c for MinGW targets so we need to return true
           in order to be able to build libiberty in LTO mode for them.  */
        return true;

      default:
        break;
      }
  return false;
}

/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}

/* If CALLEE is a builtin with known side effects, return a "fn spec"
   string describing them; otherwise return the empty spec.
   See tree-ssa-structalias.c:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);
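
  /* The strings returned below follow the "fn spec" encoding; see
     attr-fnspec.h for the authoritative description.  As a rough guide:
     character 0 describes the return value ('1'..'4' means the function
     returns that argument, as memset does; 'm' means it returns fresh,
     non-aliasing memory, as malloc does; '.' means nothing is known),
     character 1 says whether the function is otherwise const ('c') or
     pure ('p'), with the uppercase variant additionally allowing errno
     to be written (hence "mC" for malloc but "mc" for alloca), and each
     following pair of characters describes one argument: 'r'/'R' only
     read, 'o'/'O' only written, 'w'/'W' read and written but not
     escaping, a digit meaning the pointed-to memory is copied to that
     argument (as in memcpy), optionally followed by a size qualifier,
     either the number of the argument that bounds the access (the
     trailing '3' in memcpy's "1cO313") or 't' for the size of the
     pointed-to type (as in posix_memalign's ".cOt").  */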

  switch (code)
    {
      /* All the following functions read memory pointed to by
         their second argument and write memory pointed to by first
         argument.
         strcat/strncat additionally read memory pointed to by the first
         argument.  */
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
      return "1cW 1 ";
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
      return "1cW 13";
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
      return "1cO 1 ";
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      return ".cO 1 ";
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_TM_MEMCPY:
    case BUILT_IN_TM_MEMMOVE:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      return "1cO313";
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      return ".cO313";
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      return ".cO313";
    case BUILT_IN_BCOPY:
      return ".c23O3";
    case BUILT_IN_BZERO:
      return ".cO2";
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
    case BUILT_IN_BCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return ".cR3R3";

      /* The following functions read memory pointed to by their
         first argument.  */
    CASE_BUILT_IN_TM_LOAD (1):
    CASE_BUILT_IN_TM_LOAD (2):
    CASE_BUILT_IN_TM_LOAD (4):
    CASE_BUILT_IN_TM_LOAD (8):
    CASE_BUILT_IN_TM_LOAD (FLOAT):
    CASE_BUILT_IN_TM_LOAD (DOUBLE):
    CASE_BUILT_IN_TM_LOAD (LDOUBLE):
    CASE_BUILT_IN_TM_LOAD (M64):
    CASE_BUILT_IN_TM_LOAD (M128):
    CASE_BUILT_IN_TM_LOAD (M256):
    case BUILT_IN_TM_LOG:
    case BUILT_IN_TM_LOG_1:
    case BUILT_IN_TM_LOG_2:
    case BUILT_IN_TM_LOG_4:
    case BUILT_IN_TM_LOG_8:
    case BUILT_IN_TM_LOG_FLOAT:
    case BUILT_IN_TM_LOG_DOUBLE:
    case BUILT_IN_TM_LOG_LDOUBLE:
    case BUILT_IN_TM_LOG_M64:
    case BUILT_IN_TM_LOG_M128:
    case BUILT_IN_TM_LOG_M256:
      return ".cR ";

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRRCHR:
      return ".cR ";
    case BUILT_IN_STRNLEN:
      return ".cR2";

      /* These read memory pointed to by the first argument.
         Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.
         Unix98 specifies that errno is set on allocation failure.  */
    case BUILT_IN_STRDUP:
      return "mCR ";
    case BUILT_IN_STRNDUP:
      return "mCR2";
      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
    case BUILT_IN_MALLOC:
    case BUILT_IN_ALIGNED_ALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_GOMP_ALLOC:
      return "mC";
    CASE_BUILT_IN_ALLOCA:
      return "mc";
      /* These read memory pointed to by the first argument with size
         in the third argument.  */
    case BUILT_IN_MEMCHR:
      return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
    case BUILT_IN_STRSTR:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
      return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         across it.  */
    case BUILT_IN_STACK_RESTORE:
    case BUILT_IN_FREE:
    case BUILT_IN_GOMP_FREE:
      return ".co ";
    case BUILT_IN_VA_END:
      return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
    case BUILT_IN_REALLOC:
      return ".Cw ";
    case BUILT_IN_GAMMA_R:
    case BUILT_IN_GAMMAF_R:
    case BUILT_IN_GAMMAL_R:
    case BUILT_IN_LGAMMA_R:
    case BUILT_IN_LGAMMAF_R:
    case BUILT_IN_LGAMMAL_R:
      return ".C. Ot";
    case BUILT_IN_FREXP:
    case BUILT_IN_FREXPF:
    case BUILT_IN_FREXPL:
    case BUILT_IN_MODF:
    case BUILT_IN_MODFF:
    case BUILT_IN_MODFL:
      return ".c. Ot";
    case BUILT_IN_REMQUO:
    case BUILT_IN_REMQUOF:
    case BUILT_IN_REMQUOL:
      return ".c. . Ot";
    case BUILT_IN_SINCOS:
    case BUILT_IN_SINCOSF:
    case BUILT_IN_SINCOSL:
      return ".c. OtOt";
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_TM_MEMSET:
      return "1cO3";
    CASE_BUILT_IN_TM_STORE (1):
    CASE_BUILT_IN_TM_STORE (2):
    CASE_BUILT_IN_TM_STORE (4):
    CASE_BUILT_IN_TM_STORE (8):
    CASE_BUILT_IN_TM_STORE (FLOAT):
    CASE_BUILT_IN_TM_STORE (DOUBLE):
    CASE_BUILT_IN_TM_STORE (LDOUBLE):
    CASE_BUILT_IN_TM_STORE (M64):
    CASE_BUILT_IN_TM_STORE (M128):
    CASE_BUILT_IN_TM_STORE (M256):
      return ".cO ";
    case BUILT_IN_STACK_SAVE:
      return ".c";
    case BUILT_IN_ASSUME_ALIGNED:
      return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
         by its first argument.  */
    case BUILT_IN_POSIX_MEMALIGN:
      return ".cOt";

    default:
      return "";