middle-end/112830 - avoid gimplifying non-default addr-space assign to memcpy
[official-gcc.git] / gcc / builtins.cc
1 /* Expand builtin functions.
2 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "gomp-constants.h"
75 #include "omp-general.h"
76 #include "tree-dfa.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
97 #include "builtins.def"
100 /* Set up an array of builtin_info_type; make sure each element's decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static rtx expand_builtin_classify_type (tree);
117 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
118 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
119 static rtx expand_builtin_interclass_mathfn (tree, rtx);
120 static rtx expand_builtin_sincos (tree);
121 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
122 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
123 optab);
124 static rtx expand_builtin_cexpi (tree, rtx);
125 static rtx expand_builtin_issignaling (tree, rtx);
126 static rtx expand_builtin_int_roundingfn (tree, rtx);
127 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
128 static rtx expand_builtin_next_arg (void);
129 static rtx expand_builtin_va_start (tree);
130 static rtx expand_builtin_va_end (tree);
131 static rtx expand_builtin_va_copy (tree);
132 static rtx inline_expand_builtin_bytecmp (tree, rtx);
133 static rtx expand_builtin_strcmp (tree, rtx);
134 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
135 static rtx expand_builtin_memcpy (tree, rtx);
136 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
137 rtx target, tree exp,
138 memop_ret retmode,
139 bool might_overlap);
140 static rtx expand_builtin_memmove (tree, rtx);
141 static rtx expand_builtin_mempcpy (tree, rtx);
142 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
143 static rtx expand_builtin_strcpy (tree, rtx);
144 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
145 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
146 static rtx expand_builtin_strncpy (tree, rtx);
147 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 static rtx expand_builtin_bzero (tree);
149 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_alloca (tree);
152 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 static rtx expand_builtin_frame_address (tree, tree);
154 static tree stabilize_va_list_loc (location_t, tree, int);
155 static rtx expand_builtin_expect (tree, rtx);
156 static rtx expand_builtin_expect_with_probability (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_classify_type (tree);
159 static tree fold_builtin_strlen (location_t, tree, tree, tree);
160 static tree fold_builtin_inf (location_t, tree, int);
161 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 static bool validate_arg (const_tree, enum tree_code code);
163 static rtx expand_builtin_fabs (tree, rtx, rtx);
164 static rtx expand_builtin_signbit (tree, rtx);
165 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 static tree fold_builtin_isascii (location_t, tree);
167 static tree fold_builtin_toascii (location_t, tree);
168 static tree fold_builtin_isdigit (location_t, tree);
169 static tree fold_builtin_fabs (location_t, tree, tree);
170 static tree fold_builtin_abs (location_t, tree, tree);
171 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 enum tree_code);
173 static tree fold_builtin_iseqsig (location_t, tree, tree);
174 static tree fold_builtin_varargs (location_t, tree, tree*, int);
176 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
177 static tree fold_builtin_strspn (location_t, tree, tree, tree);
178 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
180 static rtx expand_builtin_object_size (tree);
181 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
182 enum built_in_function);
183 static void maybe_emit_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
185 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
187 unsigned HOST_WIDE_INT target_newline;
188 unsigned HOST_WIDE_INT target_percent;
189 static unsigned HOST_WIDE_INT target_c;
190 static unsigned HOST_WIDE_INT target_s;
191 char target_percent_c[3];
192 char target_percent_s[3];
193 char target_percent_s_newline[4];
194 static tree do_mpfr_remquo (tree, tree, tree);
195 static tree do_mpfr_lgamma_r (tree, tree, tree);
196 static void expand_builtin_sync_synchronize (void);
198 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
200 static bool
201 is_builtin_name (const char *name)
203 return (startswith (name, "__builtin_")
204 || startswith (name, "__sync_")
205 || startswith (name, "__atomic_"));
208 /* Return true if NODE should be considered for inline expansion regardless
209 of the optimization level. This means whenever a function is invoked with
210 its "internal" name, which normally contains the prefix "__builtin". */
212 bool
213 called_as_built_in (tree node)
215 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
216 we want the name used to call the function, not the name it
217 will have. */
218 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
219 return is_builtin_name (name);
222 /* Compute values M and N such that M divides (address of EXP - N) and such
223 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
224 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
225 *ALIGNP and any bit-offset to *BITPOSP.
227 Note that the address (and thus the alignment) computed here is based
228 on the address to which a symbol resolves, whereas DECL_ALIGN is based
229 on the address at which an object is actually located. These two
230 addresses are not always the same. For example, on ARM targets,
231 the address &foo of a Thumb function foo() has the lowest bit set,
232 whereas foo() itself starts on an even address.
234 If ADDR_P is true we are taking the address of the memory reference EXP
235 and thus cannot rely on the access taking place. */
237 bool
238 get_object_alignment_2 (tree exp, unsigned int *alignp,
239 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 poly_int64 bitsize, bitpos;
242 tree offset;
243 machine_mode mode;
244 int unsignedp, reversep, volatilep;
245 unsigned int align = BITS_PER_UNIT;
246 bool known_alignment = false;
248 /* Get the innermost object and the constant (bitpos) and possibly
249 variable (offset) offset of the access. */
250 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
251 &unsignedp, &reversep, &volatilep);
253 /* Extract alignment information from the innermost object and
254 possibly adjust bitpos and offset. */
255 if (TREE_CODE (exp) == FUNCTION_DECL)
257 /* Function addresses can encode extra information besides their
258 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
259 allows the low bit to be used as a virtual bit, we know
260 that the address itself must be at least 2-byte aligned. */
261 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
262 align = 2 * BITS_PER_UNIT;
264 else if (TREE_CODE (exp) == LABEL_DECL)
266 else if (TREE_CODE (exp) == CONST_DECL)
268 /* The alignment of a CONST_DECL is determined by its initializer. */
269 exp = DECL_INITIAL (exp);
270 align = TYPE_ALIGN (TREE_TYPE (exp));
271 if (CONSTANT_CLASS_P (exp))
272 align = targetm.constant_alignment (exp, align);
274 known_alignment = true;
276 else if (DECL_P (exp))
278 align = DECL_ALIGN (exp);
279 known_alignment = true;
281 else if (TREE_CODE (exp) == INDIRECT_REF
282 || TREE_CODE (exp) == MEM_REF
283 || TREE_CODE (exp) == TARGET_MEM_REF)
285 tree addr = TREE_OPERAND (exp, 0);
286 unsigned ptr_align;
287 unsigned HOST_WIDE_INT ptr_bitpos;
288 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290 /* If the address is explicitly aligned, handle that. */
291 if (TREE_CODE (addr) == BIT_AND_EXPR
292 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
295 ptr_bitmask *= BITS_PER_UNIT;
296 align = least_bit_hwi (ptr_bitmask);
297 addr = TREE_OPERAND (addr, 0);
300 known_alignment
301 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
302 align = MAX (ptr_align, align);
304 /* Re-apply explicit alignment to the bitpos. */
305 ptr_bitpos &= ptr_bitmask;
307 /* The alignment of the pointer operand in a TARGET_MEM_REF
308 has to take the variable offset parts into account. */
309 if (TREE_CODE (exp) == TARGET_MEM_REF)
311 if (TMR_INDEX (exp))
313 unsigned HOST_WIDE_INT step = 1;
314 if (TMR_STEP (exp))
315 step = TREE_INT_CST_LOW (TMR_STEP (exp));
316 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 if (TMR_INDEX2 (exp))
319 align = BITS_PER_UNIT;
320 known_alignment = false;
323 /* When EXP is an actual memory reference then we can use
324 TYPE_ALIGN of a pointer indirection to derive alignment.
325 Do so only if get_pointer_alignment_1 did not reveal absolute
326 alignment knowledge and if using that alignment would
327 improve the situation. */
328 unsigned int talign;
329 if (!addr_p && !known_alignment
330 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
331 && talign > align)
332 align = talign;
333 else
335 /* Else adjust bitpos accordingly. */
336 bitpos += ptr_bitpos;
337 if (TREE_CODE (exp) == MEM_REF
338 || TREE_CODE (exp) == TARGET_MEM_REF)
339 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
342 else if (TREE_CODE (exp) == STRING_CST)
344 /* STRING_CSTs are the only constant objects we allow not to be
345 wrapped inside a CONST_DECL. */
346 align = TYPE_ALIGN (TREE_TYPE (exp));
347 if (CONSTANT_CLASS_P (exp))
348 align = targetm.constant_alignment (exp, align);
350 known_alignment = true;
353 /* If there is a non-constant offset part extract the maximum
354 alignment that can prevail. */
355 if (offset)
357 unsigned int trailing_zeros = tree_ctz (offset);
358 if (trailing_zeros < HOST_BITS_PER_INT)
360 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
361 if (inner)
362 align = MIN (align, inner);
366 /* Account for the alignment of runtime coefficients, so that the constant
367 bitpos is guaranteed to be accurate. */
368 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
369 if (alt_align != 0 && alt_align < align)
371 align = alt_align;
372 known_alignment = false;
375 *alignp = align;
376 *bitposp = bitpos.coeffs[0] & (align - 1);
377 return known_alignment;
380 /* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
382 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
383 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385 bool
386 get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
389 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
390 with it. */
391 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
392 exp = TREE_OPERAND (exp, 0);
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
396 /* Return the alignment in bits of EXP, an object. */
398 unsigned int
399 get_object_alignment (tree exp)
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
404 get_object_alignment_1 (exp, &align, &bitpos);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
409 if (bitpos != 0)
410 align = least_bit_hwi (bitpos);
411 return align;
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
419 If EXP is not a pointer, false is returned too. */
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
425 STRIP_NOPS (exp);
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
466 /* We cannot really tell whether this result is an approximation. */
467 return false;
469 else
471 *bitposp = 0;
472 *alignp = BITS_PER_UNIT;
473 return false;
476 else if (TREE_CODE (exp) == INTEGER_CST)
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
484 *bitposp = 0;
485 *alignp = BITS_PER_UNIT;
486 return false;
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
496 unsigned int
497 get_pointer_alignment (tree exp)
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
502 get_pointer_alignment_1 (exp, &align, &bitpos);
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
507 if (bitpos != 0)
508 align = least_bit_hwi (bitpos);
510 return align;
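/* An illustrative sketch, not part of the original sources: the ALIGN/BITPOS
   contract used above.  If get_pointer_alignment_1 reports align == 64 and
   bitpos == 16 (both in bits) for a pointer P, then P is known to be exactly
   2 bytes past an 8-byte boundary.  A caller that only wants one conservative
   alignment value folds the misalignment back in, exactly as
   get_pointer_alignment does:

     unsigned int align;
     unsigned HOST_WIDE_INT misalign;
     get_pointer_alignment_1 (ptr, &align, &misalign);
     if (misalign != 0)
       align = least_bit_hwi (misalign);
     // Now the pointer is guaranteed to be aligned to at least ALIGN bits.  */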
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517 unsigned
518 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522 unsigned n;
524 if (eltsize == 1)
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
534 else
536 for (n = 0; n < maxelts; n++)
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
543 return n;
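/* Worked examples (illustrative, not part of the original sources):

     string_length ("ab\0cd", 1, 5)   returns 2
     string_length ("abc",    1, 3)   returns 3   (no NUL within MAXELTS)

   and for ELTSIZE == 4, a 4-byte wide string L"a" followed by its zero
   terminator yields 1, since the second element compares equal to
   "\0\0\0\0".  */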
546 /* Compute the length of a null-terminated character string or wide
547 character string handling character sizes of 1, 2, and 4 bytes.
548 TREE_STRING_LENGTH is not the right way because it evaluates to
549 the size of the character array in bytes (as opposed to characters)
550 and because it can contain a zero byte in the middle.
552 ONLY_VALUE should be nonzero if the result is not going to be emitted
553 into the instruction stream and zero if it is going to be expanded.
554 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
555 is returned, otherwise NULL, since
556 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
557 evaluate the side-effects.
559 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
560 accesses. Note that this implies the result is not going to be emitted
561 into the instruction stream.
563 Additional information about the string accessed may be recorded
564 in DATA. For example, if ARG references an unterminated string,
565 then the declaration will be stored in the DECL field. If the
566 length of the unterminated string can be determined, it'll be
567 stored in the LEN field. Note this length could well be different
568 than what a C strlen call would return.
570 ELTSIZE is 1 for normal single byte character strings, and 2 or
571 4 for wide character strings. ELTSIZE is by default 1.
573 The value returned is of type `ssizetype'. */
575 tree
576 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
578 /* If we were not passed a DATA pointer, then get one to a local
579 structure. That avoids having to check DATA for NULL before
580 each time we want to use it. */
581 c_strlen_data local_strlen_data = { };
582 if (!data)
583 data = &local_strlen_data;
585 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
587 tree src = STRIP_NOPS (arg);
588 if (TREE_CODE (src) == COND_EXPR
589 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 tree len1, len2;
593 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
594 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
595 if (tree_int_cst_equal (len1, len2))
596 return len1;
599 if (TREE_CODE (src) == COMPOUND_EXPR
600 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
601 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
603 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
605 /* Offset from the beginning of the string in bytes. */
606 tree byteoff;
607 tree memsize;
608 tree decl;
609 src = string_constant (src, &byteoff, &memsize, &decl);
610 if (src == 0)
611 return NULL_TREE;
613 /* Determine the size of the string element. */
614 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
615 return NULL_TREE;
617 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
618 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
619 in case the latter is less than the size of the array, such as when
620 SRC refers to a short string literal used to initialize a large array.
621 In that case, the elements of the array after the terminating NUL are
622 all NUL. */
623 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
624 strelts = strelts / eltsize;
626 if (!tree_fits_uhwi_p (memsize))
627 return NULL_TREE;
629 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
631 /* PTR can point to the byte representation of any string type, including
632 char* and wchar_t*. */
633 const char *ptr = TREE_STRING_POINTER (src);
635 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
637 /* The code below works only for single byte character types. */
638 if (eltsize != 1)
639 return NULL_TREE;
641 /* If the string has an internal NUL character followed by any
642 non-NUL characters (e.g., "foo\0bar"), we can't compute
643 the offset to the following NUL if we don't know where to
644 start searching for it. */
645 unsigned len = string_length (ptr, eltsize, strelts);
647 /* Return when an embedded null character is found or none at all.
648 In the latter case, set the DECL/LEN field in the DATA structure
649 so that callers may examine them. */
650 if (len + 1 < strelts)
651 return NULL_TREE;
652 else if (len >= maxelts)
654 data->decl = decl;
655 data->off = byteoff;
656 data->minlen = ssize_int (len);
657 return NULL_TREE;
660 /* For empty strings the result should be zero. */
661 if (len == 0)
662 return ssize_int (0);
664 /* We don't know the starting offset, but we do know that the string
665 has no internal zero bytes. If the offset falls within the bounds
666 of the string subtract the offset from the length of the string,
667 and return that. Otherwise the length is zero. Take care to
668 use SAVE_EXPR in case the OFFSET has side-effects. */
669 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
670 : byteoff;
671 offsave = fold_convert_loc (loc, sizetype, offsave);
672 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
673 size_int (len));
674 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
675 offsave);
676 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
677 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
678 build_zero_cst (ssizetype));
681 /* Offset from the beginning of the string in elements. */
682 HOST_WIDE_INT eltoff;
684 /* We have a known offset into the string. Start searching there for
685 a null character if we can represent it as a single HOST_WIDE_INT. */
686 if (byteoff == 0)
687 eltoff = 0;
688 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
689 eltoff = -1;
690 else
691 eltoff = tree_to_uhwi (byteoff) / eltsize;
693 /* If the offset is known to be out of bounds, warn, and call strlen at
694 runtime. */
695 if (eltoff < 0 || eltoff >= maxelts)
697 /* Suppress multiple warnings for propagated constant strings. */
698 if (only_value != 2
699 && !warning_suppressed_p (arg, OPT_Warray_bounds_)
700 && warning_at (loc, OPT_Warray_bounds_,
701 "offset %qwi outside bounds of constant string",
702 eltoff))
704 if (decl)
705 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
706 suppress_warning (arg, OPT_Warray_bounds_);
708 return NULL_TREE;
711 /* If eltoff is larger than strelts but less than maxelts the
712 string length is zero, since the excess memory will be zero. */
713 if (eltoff > strelts)
714 return ssize_int (0);
716 /* Use strlen to search for the first zero byte. Since any strings
717 constructed with build_string will have nulls appended, we win even
718 if we get handed something like (char[4])"abcd".
720 Since ELTOFF is our starting index into the string, no further
721 calculation is needed. */
722 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
723 strelts - eltoff);
725 /* Don't know what to return if there was no zero termination.
726 Ideally this would turn into a gcc_checking_assert over time.
727 Set DECL/LEN so callers can examine them. */
728 if (len >= maxelts - eltoff)
730 data->decl = decl;
731 data->off = byteoff;
732 data->minlen = ssize_int (len);
733 return NULL_TREE;
736 return ssize_int (len);
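/* Illustrative examples, not part of the original sources: for ARG pointing
   to the string constant "foo" (stored in a char[8] whose tail is
   zero-filled), the function returns ssize_int (3).  For "foo\0bar" with a
   non-constant byte offset it returns NULL_TREE, because the distance to the
   terminating NUL then depends on where the scan starts, as explained in the
   embedded-NUL case above.  */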
739 /* Return a constant integer corresponding to target reading
740 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
741 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
742 are assumed to be zero, otherwise it reads as many characters
743 as needed. */
745 rtx
746 c_readstr (const char *str, fixed_size_mode mode,
747 bool null_terminated_p/*=true*/)
749 auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
751 bytes.reserve (GET_MODE_SIZE (mode));
753 target_unit ch = 1;
754 for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
756 if (ch || !null_terminated_p)
757 ch = (unsigned char) str[i];
758 bytes.quick_push (ch);
761 return native_decode_rtx (mode, bytes, 0);
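/* Illustrative example, not part of the original sources: c_readstr ("ab",
   SImode) builds the constant whose target memory image is the bytes
   'a', 'b', 0, 0; once the terminating NUL is reached, every remaining byte
   reads as zero when NULL_TERMINATED_P.  On a little-endian target
   native_decode_rtx turns that into (const_int 0x6261), on a big-endian
   target into (const_int 0x61620000).  */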
764 /* Cast a target constant CST to target CHAR and if that value fits into
765 host char type, return zero and put that value into variable pointed to by
766 P. */
768 static int
769 target_char_cast (tree cst, char *p)
771 unsigned HOST_WIDE_INT val, hostval;
773 if (TREE_CODE (cst) != INTEGER_CST
774 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
775 return 1;
777 /* Do not care if it fits or not right here. */
778 val = TREE_INT_CST_LOW (cst);
780 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
781 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
783 hostval = val;
784 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
785 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
787 if (val != hostval)
788 return 1;
790 *p = hostval;
791 return 0;
794 /* Similar to save_expr, but assumes that arbitrary code is not executed
795 in between the multiple evaluations. In particular, we assume that a
796 non-addressable local variable will not be modified. */
798 static tree
799 builtin_save_expr (tree exp)
801 if (TREE_CODE (exp) == SSA_NAME
802 || (TREE_ADDRESSABLE (exp) == 0
803 && (TREE_CODE (exp) == PARM_DECL
804 || (VAR_P (exp) && !TREE_STATIC (exp)))))
805 return exp;
807 return save_expr (exp);
810 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
811 times to get the address of either a higher stack frame, or a return
812 address located within it (depending on FNDECL_CODE). */
814 static rtx
815 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
817 int i;
818 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
819 if (tem == NULL_RTX)
821 /* For a zero count with __builtin_return_address, we don't care what
822 frame address we return, because target-specific definitions will
823 override us. Therefore frame pointer elimination is OK, and using
824 the soft frame pointer is OK.
826 For a nonzero count, or a zero count with __builtin_frame_address,
827 we require a stable offset from the current frame pointer to the
828 previous one, so we must use the hard frame pointer, and
829 we must disable frame pointer elimination. */
830 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
831 tem = frame_pointer_rtx;
832 else
834 tem = hard_frame_pointer_rtx;
836 /* Tell reload not to eliminate the frame pointer. */
837 crtl->accesses_prior_frames = 1;
841 if (count > 0)
842 SETUP_FRAME_ADDRESSES ();
844 /* On the SPARC, the return address is not in the frame, it is in a
845 register. There is no way to access it off of the current frame
846 pointer, but it can be accessed off the previous frame pointer by
847 reading the value from the register window save area. */
848 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
849 count--;
851 /* Scan back COUNT frames to the specified frame. */
852 for (i = 0; i < count; i++)
854 /* Assume the dynamic chain pointer is in the word that the
855 frame address points to, unless otherwise specified. */
856 tem = DYNAMIC_CHAIN_ADDRESS (tem);
857 tem = memory_address (Pmode, tem);
858 tem = gen_frame_mem (Pmode, tem);
859 tem = copy_to_reg (tem);
862 /* For __builtin_frame_address, return what we've got. But, on
863 the SPARC for example, we may have to add a bias. */
864 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
865 return FRAME_ADDR_RTX (tem);
867 /* For __builtin_return_address, get the return address from that frame. */
868 #ifdef RETURN_ADDR_RTX
869 tem = RETURN_ADDR_RTX (count, tem);
870 #else
871 tem = memory_address (Pmode,
872 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
873 tem = gen_frame_mem (Pmode, tem);
874 #endif
875 return tem;
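/* Illustrative user-level usage, not part of the original sources:

     void *ret0 (void) { return __builtin_return_address (0); }
     void *fp0  (void) { return __builtin_frame_address (0); }

   A nonzero COUNT walks the dynamic chain via the loop above, which is only
   reliable when every intervening frame keeps a frame pointer.  */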
878 /* Alias set used for setjmp buffer. */
879 static alias_set_type setjmp_alias_set = -1;
881 /* Construct the leading half of a __builtin_setjmp call. Control will
882 return to RECEIVER_LABEL. This is also called directly by the SJLJ
883 exception handling code. */
885 void
886 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
888 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
889 rtx stack_save;
890 rtx mem;
892 if (setjmp_alias_set == -1)
893 setjmp_alias_set = new_alias_set ();
895 buf_addr = convert_memory_address (Pmode, buf_addr);
897 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
899 /* We store the frame pointer and the address of receiver_label in
900 the buffer and use the rest of it for the stack save area, which
901 is machine-dependent. */
903 mem = gen_rtx_MEM (Pmode, buf_addr);
904 set_mem_alias_set (mem, setjmp_alias_set);
905 emit_move_insn (mem, hard_frame_pointer_rtx);
907 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
908 GET_MODE_SIZE (Pmode)));
909 set_mem_alias_set (mem, setjmp_alias_set);
911 emit_move_insn (validize_mem (mem),
912 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
914 stack_save = gen_rtx_MEM (sa_mode,
915 plus_constant (Pmode, buf_addr,
916 2 * GET_MODE_SIZE (Pmode)));
917 set_mem_alias_set (stack_save, setjmp_alias_set);
918 emit_stack_save (SAVE_NONLOCAL, &stack_save);
920 /* If there is further processing to do, do it. */
921 if (targetm.have_builtin_setjmp_setup ())
922 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
924 /* We have a nonlocal label. */
925 cfun->has_nonlocal_label = 1;
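/* Layout sketch of the setjmp buffer filled in above (illustrative, not part
   of the original sources):

     offset 0                          hard frame pointer        (Pmode)
     offset GET_MODE_SIZE (Pmode)      address of RECEIVER_LABEL (Pmode)
     offset 2 * GET_MODE_SIZE (Pmode)  stack save area, in
                                       STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read the
   words back from the same offsets.  */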
928 /* Construct the trailing part of a __builtin_setjmp call. This is
929 also called directly by the SJLJ exception handling code.
930 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
932 void
933 expand_builtin_setjmp_receiver (rtx receiver_label)
935 rtx chain;
937 /* Mark the FP as used when we get here, so we have to make sure it's
938 marked as used by this function. */
939 emit_use (hard_frame_pointer_rtx);
941 /* Mark the static chain as clobbered here so life information
942 doesn't get messed up for it. */
943 chain = rtx_for_static_chain (current_function_decl, true);
944 if (chain && REG_P (chain))
945 emit_clobber (chain);
947 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
949 /* If the argument pointer can be eliminated in favor of the
950 frame pointer, we don't need to restore it. We assume here
951 that if such an elimination is present, it can always be used.
952 This is the case on all known machines; if we don't make this
953 assumption, we do unnecessary saving on many machines. */
954 size_t i;
955 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
957 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
958 if (elim_regs[i].from == ARG_POINTER_REGNUM
959 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
960 break;
962 if (i == ARRAY_SIZE (elim_regs))
964 /* Now restore our arg pointer from the address at which it
965 was saved in our stack frame. */
966 emit_move_insn (crtl->args.internal_arg_pointer,
967 copy_to_reg (get_arg_pointer_save_area ()));
971 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
972 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
973 else if (targetm.have_nonlocal_goto_receiver ())
974 emit_insn (targetm.gen_nonlocal_goto_receiver ());
975 else
976 { /* Nothing */ }
978 /* We must not allow the code we just generated to be reordered by
979 scheduling. Specifically, the update of the frame pointer must
980 happen immediately, not later. */
981 emit_insn (gen_blockage ());
984 /* __builtin_longjmp is passed a pointer to an array of five words (not
985 all will be used on all machines). It operates similarly to the C
986 library function of the same name, but is more efficient. Much of
987 the code below is copied from the handling of non-local gotos. */
989 static void
990 expand_builtin_longjmp (rtx buf_addr, rtx value)
992 rtx fp, lab, stack;
993 rtx_insn *insn, *last;
994 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
996 /* DRAP is needed for stack realign if longjmp is expanded to current
997 function */
998 if (SUPPORTS_STACK_ALIGNMENT)
999 crtl->need_drap = true;
1001 if (setjmp_alias_set == -1)
1002 setjmp_alias_set = new_alias_set ();
1004 buf_addr = convert_memory_address (Pmode, buf_addr);
1006 buf_addr = force_reg (Pmode, buf_addr);
1008 /* We require that the user must pass a second argument of 1, because
1009 that is what builtin_setjmp will return. */
1010 gcc_assert (value == const1_rtx);
1012 last = get_last_insn ();
1013 if (targetm.have_builtin_longjmp ())
1014 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1015 else
1017 fp = gen_rtx_MEM (Pmode, buf_addr);
1018 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1019 GET_MODE_SIZE (Pmode)));
1021 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1022 2 * GET_MODE_SIZE (Pmode)));
1023 set_mem_alias_set (fp, setjmp_alias_set);
1024 set_mem_alias_set (lab, setjmp_alias_set);
1025 set_mem_alias_set (stack, setjmp_alias_set);
1027 /* Pick up FP, label, and SP from the block and jump. This code is
1028 from expand_goto in stmt.cc; see there for detailed comments. */
1029 if (targetm.have_nonlocal_goto ())
1030 /* We have to pass a value to the nonlocal_goto pattern that will
1031 get copied into the static_chain pointer, but it does not matter
1032 what that value is, because builtin_setjmp does not use it. */
1033 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1034 else
1036 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1037 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1039 lab = copy_to_reg (lab);
1041 /* Restore the frame pointer and stack pointer. We must use a
1042 temporary since the setjmp buffer may be a local. */
1043 fp = copy_to_reg (fp);
1044 emit_stack_restore (SAVE_NONLOCAL, stack);
1046 /* Ensure the frame pointer move is not optimized. */
1047 emit_insn (gen_blockage ());
1048 emit_clobber (hard_frame_pointer_rtx);
1049 emit_clobber (frame_pointer_rtx);
1050 emit_move_insn (hard_frame_pointer_rtx, fp);
1052 emit_use (hard_frame_pointer_rtx);
1053 emit_use (stack_pointer_rtx);
1054 emit_indirect_jump (lab);
1058 /* Search backwards and mark the jump insn as a non-local goto.
1059 Note that this precludes the use of __builtin_longjmp to a
1060 __builtin_setjmp target in the same function. However, we've
1061 already cautioned the user that these functions are for
1062 internal exception handling use only. */
1063 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1065 gcc_assert (insn != last);
1067 if (JUMP_P (insn))
1069 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1070 break;
1072 else if (CALL_P (insn))
1073 break;
1077 static inline bool
1078 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1080 return (iter->i < iter->n);
1083 /* This function validates the types of a function call argument list
1084 against a specified list of tree_codes. If the last specifier is a 0,
1085 that represents an ellipsis, otherwise the last specifier must be a
1086 VOID_TYPE. */
1088 static bool
1089 validate_arglist (const_tree callexpr, ...)
1091 enum tree_code code;
1092 bool res = 0;
1093 va_list ap;
1094 const_call_expr_arg_iterator iter;
1095 const_tree arg;
1097 va_start (ap, callexpr);
1098 init_const_call_expr_arg_iterator (callexpr, &iter);
1100 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1101 tree fn = CALL_EXPR_FN (callexpr);
1102 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1104 for (unsigned argno = 1; ; ++argno)
1106 code = (enum tree_code) va_arg (ap, int);
1108 switch (code)
1110 case 0:
1111 /* This signifies an ellipsis; any further arguments are all ok. */
1112 res = true;
1113 goto end;
1114 case VOID_TYPE:
1115 /* This signifies an endlink; if no arguments remain, return
1116 true, otherwise return false. */
1117 res = !more_const_call_expr_args_p (&iter);
1118 goto end;
1119 case POINTER_TYPE:
1120 /* The actual argument must be nonnull when either the whole
1121 called function has been declared nonnull, or when the formal
1122 argument corresponding to the actual argument has been. */
1123 if (argmap
1124 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1126 arg = next_const_call_expr_arg (&iter);
1127 if (!validate_arg (arg, code) || integer_zerop (arg))
1128 goto end;
1129 break;
1131 /* FALLTHRU */
1132 default:
1133 /* If no parameters remain or the parameter's code does not
1134 match the specified code, return false. Otherwise continue
1135 checking any remaining arguments. */
1136 arg = next_const_call_expr_arg (&iter);
1137 if (!validate_arg (arg, code))
1138 goto end;
1139 break;
1143 /* We need gotos here since we can only have one VA_CLOSE in a
1144 function. */
1145 end: ;
1146 va_end (ap);
1148 BITMAP_FREE (argmap);
1150 return res;
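/* Illustrative use, not part of the original sources: a typical caller writes

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   meaning "exactly one pointer argument followed by one integer argument".
   A trailing VOID_TYPE requires that no arguments remain, whereas a trailing
   0 accepts anything after the listed codes.  */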
1153 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1154 and the address of the save area. */
1156 static rtx
1157 expand_builtin_nonlocal_goto (tree exp)
1159 tree t_label, t_save_area;
1160 rtx r_label, r_save_area, r_fp, r_sp;
1161 rtx_insn *insn;
1163 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1164 return NULL_RTX;
1166 t_label = CALL_EXPR_ARG (exp, 0);
1167 t_save_area = CALL_EXPR_ARG (exp, 1);
1169 r_label = expand_normal (t_label);
1170 r_label = convert_memory_address (Pmode, r_label);
1171 r_save_area = expand_normal (t_save_area);
1172 r_save_area = convert_memory_address (Pmode, r_save_area);
1173 /* Copy the address of the save location to a register just in case it was
1174 based on the frame pointer. */
1175 r_save_area = copy_to_reg (r_save_area);
1176 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1177 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1178 plus_constant (Pmode, r_save_area,
1179 GET_MODE_SIZE (Pmode)));
1181 crtl->has_nonlocal_goto = 1;
1183 /* ??? We no longer need to pass the static chain value, afaik. */
1184 if (targetm.have_nonlocal_goto ())
1185 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1186 else
1188 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1189 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1191 r_label = copy_to_reg (r_label);
1193 /* Restore the frame pointer and stack pointer. We must use a
1194 temporary since the setjmp buffer may be a local. */
1195 r_fp = copy_to_reg (r_fp);
1196 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1198 /* Ensure the frame pointer move is not optimized. */
1199 emit_insn (gen_blockage ());
1200 emit_clobber (hard_frame_pointer_rtx);
1201 emit_clobber (frame_pointer_rtx);
1202 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1204 /* USE of hard_frame_pointer_rtx added for consistency;
1205 not clear if really needed. */
1206 emit_use (hard_frame_pointer_rtx);
1207 emit_use (stack_pointer_rtx);
1209 /* If the architecture is using a GP register, we must
1210 conservatively assume that the target function makes use of it.
1211 The prologue of functions with nonlocal gotos must therefore
1212 initialize the GP register to the appropriate value, and we
1213 must then make sure that this value is live at the point
1214 of the jump. (Note that this doesn't necessarily apply
1215 to targets with a nonlocal_goto pattern; they are free
1216 to implement it in their own way. Note also that this is
1217 a no-op if the GP register is a global invariant.) */
1218 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1219 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1220 emit_use (pic_offset_table_rtx);
1222 emit_indirect_jump (r_label);
1225 /* Search backwards to the jump insn and mark it as a
1226 non-local goto. */
1227 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1229 if (JUMP_P (insn))
1231 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1232 break;
1234 else if (CALL_P (insn))
1235 break;
1238 return const0_rtx;
1241 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1242 (not all will be used on all machines) that was passed to __builtin_setjmp.
1243 It updates the stack pointer in that block to the current value. This is
1244 also called directly by the SJLJ exception handling code. */
1246 void
1247 expand_builtin_update_setjmp_buf (rtx buf_addr)
1249 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1250 buf_addr = convert_memory_address (Pmode, buf_addr);
1251 rtx stack_save
1252 = gen_rtx_MEM (sa_mode,
1253 memory_address
1254 (sa_mode,
1255 plus_constant (Pmode, buf_addr,
1256 2 * GET_MODE_SIZE (Pmode))));
1258 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1261 /* Expand a call to __builtin_prefetch. For a target that does not support
1262 data prefetch, evaluate the memory address argument in case it has side
1263 effects. */
1265 static void
1266 expand_builtin_prefetch (tree exp)
1268 tree arg0, arg1, arg2;
1269 int nargs;
1270 rtx op0, op1, op2;
1272 if (!validate_arglist (exp, POINTER_TYPE, 0))
1273 return;
1275 arg0 = CALL_EXPR_ARG (exp, 0);
1277 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1278 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1279 locality). */
1280 nargs = call_expr_nargs (exp);
1281 if (nargs > 1)
1282 arg1 = CALL_EXPR_ARG (exp, 1);
1283 else
1284 arg1 = integer_zero_node;
1285 if (nargs > 2)
1286 arg2 = CALL_EXPR_ARG (exp, 2);
1287 else
1288 arg2 = integer_three_node;
1290 /* Argument 0 is an address. */
1291 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1293 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1294 if (TREE_CODE (arg1) != INTEGER_CST)
1296 error ("second argument to %<__builtin_prefetch%> must be a constant");
1297 arg1 = integer_zero_node;
1299 op1 = expand_normal (arg1);
1300 /* Argument 1 must be either zero or one. */
1301 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1303 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1304 " using zero");
1305 op1 = const0_rtx;
1308 /* Argument 2 (locality) must be a compile-time constant int. */
1309 if (TREE_CODE (arg2) != INTEGER_CST)
1311 error ("third argument to %<__builtin_prefetch%> must be a constant");
1312 arg2 = integer_zero_node;
1314 op2 = expand_normal (arg2);
1315 /* Argument 2 must be 0, 1, 2, or 3. */
1316 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1318 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1319 op2 = const0_rtx;
1322 if (targetm.have_prefetch ())
1324 class expand_operand ops[3];
1326 create_address_operand (&ops[0], op0);
1327 create_integer_operand (&ops[1], INTVAL (op1));
1328 create_integer_operand (&ops[2], INTVAL (op2));
1329 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1330 return;
1333 /* Don't do anything with direct references to volatile memory, but
1334 generate code to handle other side effects. */
1335 if (!MEM_P (op0) && side_effects_p (op0))
1336 emit_insn (op0);
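/* Illustrative user-level usage, not part of the original sources:

     __builtin_prefetch (p);             read prefetch, locality 3 (defaults)
     __builtin_prefetch (p + 64, 1, 0)   prefetch for write, no temporal
                                         locality

   On targets without a prefetch pattern the call is simply dropped, with only
   the side effects of the address argument preserved, as above.  */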
1339 /* Get a MEM rtx for expression EXP which is the address of an operand
1340 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1341 the maximum length of the block of memory that might be accessed or
1342 NULL if unknown. */
1344 static rtx
1345 get_memory_rtx (tree exp, tree len)
1347 tree orig_exp = exp, base;
1348 rtx addr, mem;
1350 gcc_checking_assert
1351 (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));
1353 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1354 from its expression; e.g. for expr->a.b only <variable>.a.b is recorded. */
1355 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1356 exp = TREE_OPERAND (exp, 0);
1358 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1359 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1361 /* Get an expression we can use to find the attributes to assign to MEM.
1362 First remove any nops. */
1363 while (CONVERT_EXPR_P (exp)
1364 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1365 exp = TREE_OPERAND (exp, 0);
1367 /* Build a MEM_REF representing the whole accessed area as a byte blob
1368 (as builtin stringops may alias with anything). */
1369 exp = fold_build2 (MEM_REF,
1370 build_array_type (char_type_node,
1371 build_range_type (sizetype,
1372 size_one_node, len)),
1373 exp, build_int_cst (ptr_type_node, 0));
1375 /* If the MEM_REF has no acceptable address, try to get the base object
1376 from the original address we got, and build an all-aliasing
1377 unknown-sized access to that one. */
1378 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1379 set_mem_attributes (mem, exp, 0);
1380 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1381 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1382 0))))
1384 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1385 exp = build_fold_addr_expr (base);
1386 exp = fold_build2 (MEM_REF,
1387 build_array_type (char_type_node,
1388 build_range_type (sizetype,
1389 size_zero_node,
1390 NULL)),
1391 exp, build_int_cst (ptr_type_node, 0));
1392 set_mem_attributes (mem, exp, 0);
1393 /* Since we stripped parts make sure the offset is unknown and the
1394 alignment is computed from the original address. */
1395 clear_mem_offset (mem);
1396 set_mem_align (mem, align);
1398 set_mem_alias_set (mem, 0);
1399 return mem;
1402 /* Built-in functions to perform an untyped call and return. */
1404 #define apply_args_mode \
1405 (this_target_builtins->x_apply_args_mode)
1406 #define apply_result_mode \
1407 (this_target_builtins->x_apply_result_mode)
1409 /* Return the size required for the block returned by __builtin_apply_args,
1410 and initialize apply_args_mode. */
1412 static int
1413 apply_args_size (void)
1415 static int size = -1;
1416 int align;
1417 unsigned int regno;
1419 /* The values computed by this function never change. */
1420 if (size < 0)
1422 /* The first value is the incoming arg-pointer. */
1423 size = GET_MODE_SIZE (Pmode);
1425 /* The second value is the structure value address unless this is
1426 passed as an "invisible" first argument. */
1427 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1428 size += GET_MODE_SIZE (Pmode);
1430 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1431 if (FUNCTION_ARG_REGNO_P (regno))
1433 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1435 if (mode != VOIDmode)
1437 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1438 if (size % align != 0)
1439 size = CEIL (size, align) * align;
1440 size += GET_MODE_SIZE (mode);
1441 apply_args_mode[regno] = mode;
1443 else
1444 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1446 else
1447 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1449 return size;
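/* Layout sketch of the __builtin_apply_args block sized above (illustrative,
   not part of the original sources):

     offset 0                       incoming arg pointer        (Pmode)
     offset GET_MODE_SIZE (Pmode)   structure value address, if it is not
                                    passed as an invisible first argument
     ...                            each argument-passing hard register,
                                    padded to its natural alignment

   apply_args_mode[REGNO] records the mode saved for each hard register, or
   VOIDmode when the register does not carry arguments.  */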
1452 /* Return the size required for the block returned by __builtin_apply,
1453 and initialize apply_result_mode. */
1455 static int
1456 apply_result_size (void)
1458 static int size = -1;
1459 int align, regno;
1461 /* The values computed by this function never change. */
1462 if (size < 0)
1464 size = 0;
1466 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1467 if (targetm.calls.function_value_regno_p (regno))
1469 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1471 if (mode != VOIDmode)
1473 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1474 if (size % align != 0)
1475 size = CEIL (size, align) * align;
1476 size += GET_MODE_SIZE (mode);
1477 apply_result_mode[regno] = mode;
1479 else
1480 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1482 else
1483 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1485 /* Allow targets that use untyped_call and untyped_return to override
1486 the size so that machine-specific information can be stored here. */
1487 #ifdef APPLY_RESULT_SIZE
1488 size = APPLY_RESULT_SIZE;
1489 #endif
1491 return size;
1494 /* Create a vector describing the result block RESULT. If SAVEP is true,
1495 the result block is used to save the values; otherwise it is used to
1496 restore the values. */
1498 static rtx
1499 result_vector (int savep, rtx result)
1501 int regno, size, align, nelts;
1502 fixed_size_mode mode;
1503 rtx reg, mem;
1504 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1506 size = nelts = 0;
1507 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1508 if ((mode = apply_result_mode[regno]) != VOIDmode)
1510 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1511 if (size % align != 0)
1512 size = CEIL (size, align) * align;
1513 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1514 mem = adjust_address (result, mode, size);
1515 savevec[nelts++] = (savep
1516 ? gen_rtx_SET (mem, reg)
1517 : gen_rtx_SET (reg, mem));
1518 size += GET_MODE_SIZE (mode);
1520 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1523 /* Save the state required to perform an untyped call with the same
1524 arguments as were passed to the current function. */
1526 static rtx
1527 expand_builtin_apply_args_1 (void)
1529 rtx registers, tem;
1530 int size, align, regno;
1531 fixed_size_mode mode;
1532 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1534 /* Create a block where the arg-pointer, structure value address,
1535 and argument registers can be saved. */
1536 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1538 /* Walk past the arg-pointer and structure value address. */
1539 size = GET_MODE_SIZE (Pmode);
1540 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1541 size += GET_MODE_SIZE (Pmode);
1543 /* Save each register used in calling a function to the block. */
1544 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1545 if ((mode = apply_args_mode[regno]) != VOIDmode)
1547 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1548 if (size % align != 0)
1549 size = CEIL (size, align) * align;
1551 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1553 emit_move_insn (adjust_address (registers, mode, size), tem);
1554 size += GET_MODE_SIZE (mode);
1557 /* Save the arg pointer to the block. */
1558 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1559 /* We need the arg pointer as the caller actually passed the arguments, not
1560 as we might have pretended they were passed. Make sure it's a valid
1561 operand, as emit_move_insn isn't expected to handle a PLUS. */
1562 if (STACK_GROWS_DOWNWARD)
1563 tem
1564 = force_operand (plus_constant (Pmode, tem,
1565 crtl->args.pretend_args_size),
1566 NULL_RTX);
1567 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1569 size = GET_MODE_SIZE (Pmode);
1571 /* Save the structure value address unless this is passed as an
1572 "invisible" first argument. */
1573 if (struct_incoming_value)
1574 emit_move_insn (adjust_address (registers, Pmode, size),
1575 copy_to_reg (struct_incoming_value));
1577 /* Return the address of the block. */
1578 return copy_addr_to_reg (XEXP (registers, 0));
1581 /* __builtin_apply_args returns block of memory allocated on
1582 the stack into which is stored the arg pointer, structure
1583 value address, static chain, and all the registers that might
1584 possibly be used in performing a function call. The code is
1585 moved to the start of the function so the incoming values are
1586 saved. */
1588 static rtx
1589 expand_builtin_apply_args (void)
1591 /* Don't do __builtin_apply_args more than once in a function.
1592 Save the result of the first call and reuse it. */
1593 if (apply_args_value != 0)
1594 return apply_args_value;
1596 /* When this function is called, it means that registers must be
1597 saved on entry to this function. So we migrate the
1598 call to the first insn of this function. */
1599 rtx temp;
1601 start_sequence ();
1602 temp = expand_builtin_apply_args_1 ();
1603 rtx_insn *seq = get_insns ();
1604 end_sequence ();
1606 apply_args_value = temp;
1608 /* Put the insns after the NOTE that starts the function.
1609 If this is inside a start_sequence, make the outer-level insn
1610 chain current, so the code is placed at the start of the
1611 function. If internal_arg_pointer is a non-virtual pseudo,
1612 it needs to be placed after the function that initializes
1613 that pseudo. */
1614 push_topmost_sequence ();
1615 if (REG_P (crtl->args.internal_arg_pointer)
1616 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1617 emit_insn_before (seq, parm_birth_insn);
1618 else
1619 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1620 pop_topmost_sequence ();
1621 return temp;
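/* Illustrative user-level usage, not part of the original sources: the
   untyped call machinery implements forwarding wrappers such as

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);

   where the last operand of __builtin_apply is an upper bound on the size of
   the stack-passed arguments to copy.  TARGET_FN here is a hypothetical
   function pointer.  */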
1625 /* Perform an untyped call and save the state required to perform an
1626 untyped return of whatever value was returned by the given function. */
1628 static rtx
1629 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1631 int size, align, regno;
1632 fixed_size_mode mode;
1633 rtx incoming_args, result, reg, dest, src;
1634 rtx_call_insn *call_insn;
1635 rtx old_stack_level = 0;
1636 rtx call_fusage = 0;
1637 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1639 arguments = convert_memory_address (Pmode, arguments);
1641 /* Create a block where the return registers can be saved. */
1642 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1644 /* Fetch the arg pointer from the ARGUMENTS block. */
1645 incoming_args = gen_reg_rtx (Pmode);
1646 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1647 if (!STACK_GROWS_DOWNWARD)
1648 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1649 incoming_args, 0, OPTAB_LIB_WIDEN);
1651 /* Push a new argument block and copy the arguments. Do not allow
1652 the (potential) memcpy call below to interfere with our stack
1653 manipulations. */
1654 do_pending_stack_adjust ();
1655 NO_DEFER_POP;
1657 /* Save the stack with nonlocal if available. */
1658 if (targetm.have_save_stack_nonlocal ())
1659 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1660 else
1661 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1663 /* Allocate a block of memory onto the stack and copy the memory
1664 arguments to the outgoing arguments address. We can pass TRUE
1665 as the final argument because we just saved the stack pointer
1666 and will restore it right after the call. */
1667 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1669 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1670 may have already set current_function_calls_alloca to true.
1671 current_function_calls_alloca won't be set if argsize is zero,
1672 so we have to guarantee need_drap is true here. */
1673 if (SUPPORTS_STACK_ALIGNMENT)
1674 crtl->need_drap = true;
1676 dest = virtual_outgoing_args_rtx;
1677 if (!STACK_GROWS_DOWNWARD)
1679 if (CONST_INT_P (argsize))
1680 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1681 else
1682 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1684 dest = gen_rtx_MEM (BLKmode, dest);
1685 set_mem_align (dest, PARM_BOUNDARY);
1686 src = gen_rtx_MEM (BLKmode, incoming_args);
1687 set_mem_align (src, PARM_BOUNDARY);
1688 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1690 /* Refer to the argument block. */
1691 apply_args_size ();
1692 arguments = gen_rtx_MEM (BLKmode, arguments);
1693 set_mem_align (arguments, PARM_BOUNDARY);
1695 /* Walk past the arg-pointer and structure value address. */
1696 size = GET_MODE_SIZE (Pmode);
1697 if (struct_value)
1698 size += GET_MODE_SIZE (Pmode);
1700 /* Restore each of the registers previously saved. Make USE insns
1701 for each of these registers for use in making the call. */
1702 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1703 if ((mode = apply_args_mode[regno]) != VOIDmode)
1705 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1706 if (size % align != 0)
1707 size = CEIL (size, align) * align;
1708 reg = gen_rtx_REG (mode, regno);
1709 emit_move_insn (reg, adjust_address (arguments, mode, size));
1710 use_reg (&call_fusage, reg);
1711 size += GET_MODE_SIZE (mode);
1714 /* Restore the structure value address unless this is passed as an
1715 "invisible" first argument. */
1716 size = GET_MODE_SIZE (Pmode);
1717 if (struct_value)
1719 rtx value = gen_reg_rtx (Pmode);
1720 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1721 emit_move_insn (struct_value, value);
1722 if (REG_P (struct_value))
1723 use_reg (&call_fusage, struct_value);
1726 /* All arguments and registers used for the call are set up by now! */
1727 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1729 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1730 and we don't want to load it into a register as an optimization,
1731 because prepare_call_address already did it if it should be done. */
1732 if (GET_CODE (function) != SYMBOL_REF)
1733 function = memory_address (FUNCTION_MODE, function);
1735 /* Generate the actual call instruction and save the return value. */
1736 if (targetm.have_untyped_call ())
1738 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1739 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1740 result_vector (1, result));
1741 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1742 if (CALL_P (insn))
1743 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1744 emit_insn (seq);
1746 else if (targetm.have_call_value ())
1748 rtx valreg = 0;
1750 /* Locate the unique return register. It is not possible to
1751 express a call that sets more than one return register using
1752 call_value; use untyped_call for that. In fact, untyped_call
1753 only needs to save the return registers in the given block. */
1754 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1755 if ((mode = apply_result_mode[regno]) != VOIDmode)
1757 gcc_assert (!valreg); /* have_untyped_call required. */
1759 valreg = gen_rtx_REG (mode, regno);
1762 emit_insn (targetm.gen_call_value (valreg,
1763 gen_rtx_MEM (FUNCTION_MODE, function),
1764 const0_rtx, NULL_RTX, const0_rtx));
1766 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1768 else
1769 gcc_unreachable ();
1771 /* Find the CALL insn we just emitted, and attach the register usage
1772 information. */
1773 call_insn = last_call_insn ();
1774 add_function_usage_to (call_insn, call_fusage);
1776 /* Restore the stack. */
1777 if (targetm.have_save_stack_nonlocal ())
1778 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1779 else
1780 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1781 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1783 OK_DEFER_POP;
1785 /* Return the address of the result block. */
1786 result = copy_addr_to_reg (XEXP (result, 0));
1787 return convert_memory_address (ptr_mode, result);
1790 /* Perform an untyped return. */
1792 static void
1793 expand_builtin_return (rtx result)
1795 int size, align, regno;
1796 fixed_size_mode mode;
1797 rtx reg;
1798 rtx_insn *call_fusage = 0;
1800 result = convert_memory_address (Pmode, result);
1802 apply_result_size ();
1803 result = gen_rtx_MEM (BLKmode, result);
1805 if (targetm.have_untyped_return ())
1807 rtx vector = result_vector (0, result);
1808 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1809 emit_barrier ();
1810 return;
1813 /* Restore the return value and note that each value is used. */
1814 size = 0;
1815 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1816 if ((mode = apply_result_mode[regno]) != VOIDmode)
1818 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1819 if (size % align != 0)
1820 size = CEIL (size, align) * align;
1821 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1822 emit_move_insn (reg, adjust_address (result, mode, size));
1824 push_to_sequence (call_fusage);
1825 emit_use (reg);
1826 call_fusage = get_insns ();
1827 end_sequence ();
1828 size += GET_MODE_SIZE (mode);
1831 /* Put the USE insns before the return. */
1832 emit_insn (call_fusage);
1834 /* Return whatever value was restored by jumping directly to the end
1835 of the function. */
1836 expand_naked_return ();
1839 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1842 type_to_class (tree type)
1844 switch (TREE_CODE (type))
1846 case VOID_TYPE: return void_type_class;
1847 case INTEGER_TYPE: return integer_type_class;
1848 case ENUMERAL_TYPE: return enumeral_type_class;
1849 case BOOLEAN_TYPE: return boolean_type_class;
1850 case POINTER_TYPE: return pointer_type_class;
1851 case REFERENCE_TYPE: return reference_type_class;
1852 case OFFSET_TYPE: return offset_type_class;
1853 case REAL_TYPE: return real_type_class;
1854 case COMPLEX_TYPE: return complex_type_class;
1855 case FUNCTION_TYPE: return function_type_class;
1856 case METHOD_TYPE: return method_type_class;
1857 case RECORD_TYPE: return record_type_class;
1858 case UNION_TYPE:
1859 case QUAL_UNION_TYPE: return union_type_class;
1860 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1861 ? string_type_class : array_type_class);
1862 case LANG_TYPE: return lang_type_class;
1863 case OPAQUE_TYPE: return opaque_type_class;
1864 case BITINT_TYPE: return bitint_type_class;
1865 case VECTOR_TYPE: return vector_type_class;
1866 default: return no_type_class;
1870 /* Expand a call EXP to __builtin_classify_type. */
1872 static rtx
1873 expand_builtin_classify_type (tree exp)
1875 if (call_expr_nargs (exp))
1876 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1877 return GEN_INT (no_type_class);
1880 /* This helper macro, meant to be used in mathfn_built_in below, determines
1881 which among a set of builtin math functions is appropriate for a given type
1882 mode. The `F' (float) and `L' (long double) are automatically generated
1883 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1884 types, there are additional types that are considered with 'F32', 'F64',
1885 'F128', etc. suffixes. */
1886 #define CASE_MATHFN(MATHFN) \
1887 CASE_CFN_##MATHFN: \
1888 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1889 fcodel = BUILT_IN_##MATHFN##L ; break;
1890 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1891 types. */
1892 #define CASE_MATHFN_FLOATN(MATHFN) \
1893 CASE_CFN_##MATHFN: \
1894 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1895 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1896 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1897 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1898 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1899 break;
1900 /* Similar to above, but appends _R after any F/L suffix. */
1901 #define CASE_MATHFN_REENT(MATHFN) \
1902 case CFN_BUILT_IN_##MATHFN##_R: \
1903 case CFN_BUILT_IN_##MATHFN##F_R: \
1904 case CFN_BUILT_IN_##MATHFN##L_R: \
1905 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1906 fcodel = BUILT_IN_##MATHFN##L_R ; break;
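/* As an illustration, CASE_MATHFN (DREM) expands to:

     CASE_CFN_DREM:
       fcode = BUILT_IN_DREM; fcodef = BUILT_IN_DREMF;
       fcodel = BUILT_IN_DREML; break;  */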
1908 /* Return a function equivalent to FN but operating on floating-point
1909 values of type TYPE, or END_BUILTINS if no such function exists.
1910 This is purely an operation on function codes; it does not guarantee
1911 that the target actually has an implementation of the function. */
1913 static built_in_function
1914 mathfn_built_in_2 (tree type, combined_fn fn)
1916 tree mtype;
1917 built_in_function fcode, fcodef, fcodel;
1918 built_in_function fcodef16 = END_BUILTINS;
1919 built_in_function fcodef32 = END_BUILTINS;
1920 built_in_function fcodef64 = END_BUILTINS;
1921 built_in_function fcodef128 = END_BUILTINS;
1922 built_in_function fcodef32x = END_BUILTINS;
1923 built_in_function fcodef64x = END_BUILTINS;
1924 built_in_function fcodef128x = END_BUILTINS;
1926 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1927 break the uses below. */
1928 #undef HUGE_VAL
1929 #undef NAN
1931 switch (fn)
1933 #define SEQ_OF_CASE_MATHFN \
1934 CASE_MATHFN_FLOATN (ACOS) \
1935 CASE_MATHFN_FLOATN (ACOSH) \
1936 CASE_MATHFN_FLOATN (ASIN) \
1937 CASE_MATHFN_FLOATN (ASINH) \
1938 CASE_MATHFN_FLOATN (ATAN) \
1939 CASE_MATHFN_FLOATN (ATAN2) \
1940 CASE_MATHFN_FLOATN (ATANH) \
1941 CASE_MATHFN_FLOATN (CBRT) \
1942 CASE_MATHFN_FLOATN (CEIL) \
1943 CASE_MATHFN (CEXPI) \
1944 CASE_MATHFN_FLOATN (COPYSIGN) \
1945 CASE_MATHFN_FLOATN (COS) \
1946 CASE_MATHFN_FLOATN (COSH) \
1947 CASE_MATHFN (DREM) \
1948 CASE_MATHFN_FLOATN (ERF) \
1949 CASE_MATHFN_FLOATN (ERFC) \
1950 CASE_MATHFN_FLOATN (EXP) \
1951 CASE_MATHFN (EXP10) \
1952 CASE_MATHFN_FLOATN (EXP2) \
1953 CASE_MATHFN_FLOATN (EXPM1) \
1954 CASE_MATHFN_FLOATN (FABS) \
1955 CASE_MATHFN_FLOATN (FDIM) \
1956 CASE_MATHFN_FLOATN (FLOOR) \
1957 CASE_MATHFN_FLOATN (FMA) \
1958 CASE_MATHFN_FLOATN (FMAX) \
1959 CASE_MATHFN_FLOATN (FMIN) \
1960 CASE_MATHFN_FLOATN (FMOD) \
1961 CASE_MATHFN_FLOATN (FREXP) \
1962 CASE_MATHFN (GAMMA) \
1963 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1964 CASE_MATHFN_FLOATN (HUGE_VAL) \
1965 CASE_MATHFN_FLOATN (HYPOT) \
1966 CASE_MATHFN_FLOATN (ILOGB) \
1967 CASE_MATHFN (ICEIL) \
1968 CASE_MATHFN (IFLOOR) \
1969 CASE_MATHFN_FLOATN (INF) \
1970 CASE_MATHFN (IRINT) \
1971 CASE_MATHFN (IROUND) \
1972 CASE_MATHFN (ISINF) \
1973 CASE_MATHFN (J0) \
1974 CASE_MATHFN (J1) \
1975 CASE_MATHFN (JN) \
1976 CASE_MATHFN (LCEIL) \
1977 CASE_MATHFN_FLOATN (LDEXP) \
1978 CASE_MATHFN (LFLOOR) \
1979 CASE_MATHFN_FLOATN (LGAMMA) \
1980 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1981 CASE_MATHFN (LLCEIL) \
1982 CASE_MATHFN (LLFLOOR) \
1983 CASE_MATHFN_FLOATN (LLRINT) \
1984 CASE_MATHFN_FLOATN (LLROUND) \
1985 CASE_MATHFN_FLOATN (LOG) \
1986 CASE_MATHFN_FLOATN (LOG10) \
1987 CASE_MATHFN_FLOATN (LOG1P) \
1988 CASE_MATHFN_FLOATN (LOG2) \
1989 CASE_MATHFN_FLOATN (LOGB) \
1990 CASE_MATHFN_FLOATN (LRINT) \
1991 CASE_MATHFN_FLOATN (LROUND) \
1992 CASE_MATHFN_FLOATN (MODF) \
1993 CASE_MATHFN_FLOATN (NAN) \
1994 CASE_MATHFN_FLOATN (NANS) \
1995 CASE_MATHFN_FLOATN (NEARBYINT) \
1996 CASE_MATHFN_FLOATN (NEXTAFTER) \
1997 CASE_MATHFN (NEXTTOWARD) \
1998 CASE_MATHFN_FLOATN (POW) \
1999 CASE_MATHFN (POWI) \
2000 CASE_MATHFN (POW10) \
2001 CASE_MATHFN_FLOATN (REMAINDER) \
2002 CASE_MATHFN_FLOATN (REMQUO) \
2003 CASE_MATHFN_FLOATN (RINT) \
2004 CASE_MATHFN_FLOATN (ROUND) \
2005 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2006 CASE_MATHFN (SCALB) \
2007 CASE_MATHFN_FLOATN (SCALBLN) \
2008 CASE_MATHFN_FLOATN (SCALBN) \
2009 CASE_MATHFN (SIGNBIT) \
2010 CASE_MATHFN (SIGNIFICAND) \
2011 CASE_MATHFN_FLOATN (SIN) \
2012 CASE_MATHFN (SINCOS) \
2013 CASE_MATHFN_FLOATN (SINH) \
2014 CASE_MATHFN_FLOATN (SQRT) \
2015 CASE_MATHFN_FLOATN (TAN) \
2016 CASE_MATHFN_FLOATN (TANH) \
2017 CASE_MATHFN_FLOATN (TGAMMA) \
2018 CASE_MATHFN_FLOATN (TRUNC) \
2019 CASE_MATHFN (Y0) \
2020 CASE_MATHFN (Y1) \
2021 CASE_MATHFN (YN)
2023 SEQ_OF_CASE_MATHFN
2025 default:
2026 return END_BUILTINS;
2029 mtype = TYPE_MAIN_VARIANT (type);
2030 if (mtype == double_type_node)
2031 return fcode;
2032 else if (mtype == float_type_node)
2033 return fcodef;
2034 else if (mtype == long_double_type_node)
2035 return fcodel;
2036 else if (mtype == float16_type_node)
2037 return fcodef16;
2038 else if (mtype == float32_type_node)
2039 return fcodef32;
2040 else if (mtype == float64_type_node)
2041 return fcodef64;
2042 else if (mtype == float128_type_node)
2043 return fcodef128;
2044 else if (mtype == float32x_type_node)
2045 return fcodef32x;
2046 else if (mtype == float64x_type_node)
2047 return fcodef64x;
2048 else if (mtype == float128x_type_node)
2049 return fcodef128x;
2050 else
2051 return END_BUILTINS;
2054 #undef CASE_MATHFN
2055 #undef CASE_MATHFN_FLOATN
2056 #undef CASE_MATHFN_REENT
2058 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
2059 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2060 otherwise use the explicit declaration. If we can't do the conversion,
2061 return null. */
2063 static tree
2064 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2066 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2067 if (fcode2 == END_BUILTINS)
2068 return NULL_TREE;
2070 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2071 return NULL_TREE;
2073 return builtin_decl_explicit (fcode2);
2076 /* Like mathfn_built_in_1, but always use the implicit array. */
2078 tree
2079 mathfn_built_in (tree type, combined_fn fn)
2081 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2084 /* Like mathfn_built_in_1, but always use the explicit array. */
2086 tree
2087 mathfn_built_in_explicit (tree type, combined_fn fn)
2089 return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2092 /* Like mathfn_built_in_1, but take a built_in_function and
2093 always use the implicit array. */
2095 tree
2096 mathfn_built_in (tree type, enum built_in_function fn)
2098 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
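/* For example, mathfn_built_in (long_double_type_node, BUILT_IN_SQRT)
   returns the declaration of sqrtl, provided that builtin may be used
   implicitly in the current language/standard mode.  */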
2101 /* Return the type associated with a built in function, i.e., the one
2102 to be passed to mathfn_built_in to get the type-specific
2103 function. */
2105 tree
2106 mathfn_built_in_type (combined_fn fn)
2108 #define CASE_MATHFN(MATHFN) \
2109 case CFN_BUILT_IN_##MATHFN: \
2110 return double_type_node; \
2111 case CFN_BUILT_IN_##MATHFN##F: \
2112 return float_type_node; \
2113 case CFN_BUILT_IN_##MATHFN##L: \
2114 return long_double_type_node;
2116 #define CASE_MATHFN_FLOATN(MATHFN) \
2117 CASE_MATHFN(MATHFN) \
2118 case CFN_BUILT_IN_##MATHFN##F16: \
2119 return float16_type_node; \
2120 case CFN_BUILT_IN_##MATHFN##F32: \
2121 return float32_type_node; \
2122 case CFN_BUILT_IN_##MATHFN##F64: \
2123 return float64_type_node; \
2124 case CFN_BUILT_IN_##MATHFN##F128: \
2125 return float128_type_node; \
2126 case CFN_BUILT_IN_##MATHFN##F32X: \
2127 return float32x_type_node; \
2128 case CFN_BUILT_IN_##MATHFN##F64X: \
2129 return float64x_type_node; \
2130 case CFN_BUILT_IN_##MATHFN##F128X: \
2131 return float128x_type_node;
2133 /* Similar to above, but appends _R after any F/L suffix. */
2134 #define CASE_MATHFN_REENT(MATHFN) \
2135 case CFN_BUILT_IN_##MATHFN##_R: \
2136 return double_type_node; \
2137 case CFN_BUILT_IN_##MATHFN##F_R: \
2138 return float_type_node; \
2139 case CFN_BUILT_IN_##MATHFN##L_R: \
2140 return long_double_type_node;
2142 switch (fn)
2144 SEQ_OF_CASE_MATHFN
2146 default:
2147 return NULL_TREE;
2150 #undef CASE_MATHFN
2151 #undef CASE_MATHFN_FLOATN
2152 #undef CASE_MATHFN_REENT
2153 #undef SEQ_OF_CASE_MATHFN
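/* For example, the function above maps CFN_BUILT_IN_SQRTF to
   float_type_node and CFN_BUILT_IN_LGAMMAL_R to long_double_type_node.  */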
2156 /* Check whether there is an internal function associated with function FN
2157 and return type RETURN_TYPE. Return the function if so, otherwise return
2158 IFN_LAST.
2160 Note that this function only tests whether the function is defined in
2161 internals.def, not whether it is actually available on the target. */
2163 static internal_fn
2164 associated_internal_fn (built_in_function fn, tree return_type)
2166 switch (fn)
2168 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2169 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2170 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2171 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2172 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2173 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2174 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2175 #include "internal-fn.def"
2177 CASE_FLT_FN (BUILT_IN_POW10):
2178 return IFN_EXP10;
2180 CASE_FLT_FN (BUILT_IN_DREM):
2181 return IFN_REMAINDER;
2183 CASE_FLT_FN (BUILT_IN_SCALBN):
2184 CASE_FLT_FN (BUILT_IN_SCALBLN):
2185 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2186 return IFN_LDEXP;
2187 return IFN_LAST;
2189 default:
2190 return IFN_LAST;
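/* For example, BUILT_IN_POW10F maps to IFN_EXP10 above, while
   BUILT_IN_SCALBN maps to IFN_LDEXP only when the return type uses a
   radix-2 floating-point format.  */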
2194 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2195 return its code, otherwise return IFN_LAST. Note that this function
2196 only tests whether the function is defined in internals.def, not whether
2197 it is actually available on the target. */
2199 internal_fn
2200 associated_internal_fn (tree fndecl)
2202 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2203 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2204 TREE_TYPE (TREE_TYPE (fndecl)));
2207 /* Check whether there is an internal function associated with function CFN
2208 and return type RETURN_TYPE. Return the function if so, otherwise return
2209 IFN_LAST.
2211 Note that this function only tests whether the function is defined in
2212 internals.def, not whether it is actually available on the target. */
2214 internal_fn
2215 associated_internal_fn (combined_fn cfn, tree return_type)
2217 if (internal_fn_p (cfn))
2218 return as_internal_fn (cfn);
2219 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2222 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2223 on the current target by a call to an internal function, return the
2224 code of that internal function, otherwise return IFN_LAST. The caller
2225 is responsible for ensuring that any side-effects of the built-in
2226 call are dealt with correctly. E.g. if CALL sets errno, the caller
2227 must decide that the errno result isn't needed or make it available
2228 in some other way. */
2230 internal_fn
2231 replacement_internal_fn (gcall *call)
2233 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2235 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2236 if (ifn != IFN_LAST)
2238 tree_pair types = direct_internal_fn_types (ifn, call);
2239 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2240 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2241 return ifn;
2244 return IFN_LAST;
2247 /* Expand a call to the builtin trinary math functions (fma).
2248 Return NULL_RTX if a normal call should be emitted rather than expanding the
2249 function in-line. EXP is the expression that is a call to the builtin
2250 function; if convenient, the result should be placed in TARGET.
2251 SUBTARGET may be used as the target for computing one of EXP's
2252 operands. */
2254 static rtx
2255 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2257 optab builtin_optab;
2258 rtx op0, op1, op2, result;
2259 rtx_insn *insns;
2260 tree fndecl = get_callee_fndecl (exp);
2261 tree arg0, arg1, arg2;
2262 machine_mode mode;
2264 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2267 arg0 = CALL_EXPR_ARG (exp, 0);
2268 arg1 = CALL_EXPR_ARG (exp, 1);
2269 arg2 = CALL_EXPR_ARG (exp, 2);
2271 switch (DECL_FUNCTION_CODE (fndecl))
2273 CASE_FLT_FN (BUILT_IN_FMA):
2274 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2275 builtin_optab = fma_optab; break;
2276 default:
2277 gcc_unreachable ();
2280 /* Make a suitable register to place result in. */
2281 mode = TYPE_MODE (TREE_TYPE (exp));
2283 /* Before working hard, check whether the instruction is available. */
2284 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2285 return NULL_RTX;
2287 result = gen_reg_rtx (mode);
2289 /* Always stabilize the argument list. */
2290 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2291 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2292 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2294 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2295 op1 = expand_normal (arg1);
2296 op2 = expand_normal (arg2);
2298 start_sequence ();
2300 /* Compute into RESULT.
2301 Set RESULT to wherever the result comes back. */
2302 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2303 result, 0);
2305 /* If we were unable to expand via the builtin, stop the sequence
2306 (without outputting the insns) and call the library function
2307 with the stabilized argument list. */
2308 if (result == 0)
2310 end_sequence ();
2311 return expand_call (exp, target, target == const0_rtx);
2314 /* Output the entire sequence. */
2315 insns = get_insns ();
2316 end_sequence ();
2317 emit_insn (insns);
2319 return result;
2322 /* Expand a call to the builtin sin and cos math functions.
2323 Return NULL_RTX if a normal call should be emitted rather than expanding the
2324 function in-line. EXP is the expression that is a call to the builtin
2325 function; if convenient, the result should be placed in TARGET.
2326 SUBTARGET may be used as the target for computing one of EXP's
2327 operands. */
2329 static rtx
2330 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2332 optab builtin_optab;
2333 rtx op0;
2334 rtx_insn *insns;
2335 tree fndecl = get_callee_fndecl (exp);
2336 machine_mode mode;
2337 tree arg;
2339 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2340 return NULL_RTX;
2342 arg = CALL_EXPR_ARG (exp, 0);
2344 switch (DECL_FUNCTION_CODE (fndecl))
2346 CASE_FLT_FN (BUILT_IN_SIN):
2347 CASE_FLT_FN (BUILT_IN_COS):
2348 builtin_optab = sincos_optab; break;
2349 default:
2350 gcc_unreachable ();
2353 /* Make a suitable register to place result in. */
2354 mode = TYPE_MODE (TREE_TYPE (exp));
2356 /* Check if the sincos insn is available, otherwise fall back
2357 to sin or cos insn. */
2358 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2359 switch (DECL_FUNCTION_CODE (fndecl))
2361 CASE_FLT_FN (BUILT_IN_SIN):
2362 builtin_optab = sin_optab; break;
2363 CASE_FLT_FN (BUILT_IN_COS):
2364 builtin_optab = cos_optab; break;
2365 default:
2366 gcc_unreachable ();
2369 /* Before working hard, check whether the instruction is available. */
2370 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2372 rtx result = gen_reg_rtx (mode);
2374 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2375 need to expand the argument again. This way, we will not perform
2376 side-effects more than once. */
2377 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2379 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2381 start_sequence ();
2383 /* Compute into RESULT.
2384 Set RESULT to wherever the result comes back. */
2385 if (builtin_optab == sincos_optab)
2387 int ok;
2389 switch (DECL_FUNCTION_CODE (fndecl))
2391 CASE_FLT_FN (BUILT_IN_SIN):
2392 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2393 break;
2394 CASE_FLT_FN (BUILT_IN_COS):
2395 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2396 break;
2397 default:
2398 gcc_unreachable ();
2400 gcc_assert (ok);
2402 else
2403 result = expand_unop (mode, builtin_optab, op0, result, 0);
2405 if (result != 0)
2407 /* Output the entire sequence. */
2408 insns = get_insns ();
2409 end_sequence ();
2410 emit_insn (insns);
2411 return result;
2414 /* If we were unable to expand via the builtin, stop the sequence
2415 (without outputting the insns) and call the library function
2416 with the stabilized argument list. */
2417 end_sequence ();
2420 return expand_call (exp, target, target == const0_rtx);
2423 /* Given an interclass math builtin decl FNDECL and its argument ARG
2424 return an RTL instruction code that implements the functionality.
2425 If that isn't possible or available return CODE_FOR_nothing. */
2427 static enum insn_code
2428 interclass_mathfn_icode (tree arg, tree fndecl)
2430 bool errno_set = false;
2431 optab builtin_optab = unknown_optab;
2432 machine_mode mode;
2434 switch (DECL_FUNCTION_CODE (fndecl))
2436 CASE_FLT_FN (BUILT_IN_ILOGB):
2437 errno_set = true; builtin_optab = ilogb_optab; break;
2438 CASE_FLT_FN (BUILT_IN_ISINF):
2439 builtin_optab = isinf_optab; break;
2440 case BUILT_IN_ISNORMAL:
2441 case BUILT_IN_ISFINITE:
2442 CASE_FLT_FN (BUILT_IN_FINITE):
2443 case BUILT_IN_FINITED32:
2444 case BUILT_IN_FINITED64:
2445 case BUILT_IN_FINITED128:
2446 case BUILT_IN_ISINFD32:
2447 case BUILT_IN_ISINFD64:
2448 case BUILT_IN_ISINFD128:
2449 /* These builtins have no optabs (yet). */
2450 break;
2451 default:
2452 gcc_unreachable ();
2455 /* There's no easy way to detect the case we need to set EDOM. */
2456 if (flag_errno_math && errno_set)
2457 return CODE_FOR_nothing;
2459 /* Optab mode depends on the mode of the input argument. */
2460 mode = TYPE_MODE (TREE_TYPE (arg));
2462 if (builtin_optab)
2463 return optab_handler (builtin_optab, mode);
2464 return CODE_FOR_nothing;
2467 /* Expand a call to one of the builtin math functions that operate on
2468 floating point argument and output an integer result (ilogb, isinf,
2469 isnan, etc).
2470 Return 0 if a normal call should be emitted rather than expanding the
2471 function in-line. EXP is the expression that is a call to the builtin
2472 function; if convenient, the result should be placed in TARGET. */
2474 static rtx
2475 expand_builtin_interclass_mathfn (tree exp, rtx target)
2477 enum insn_code icode = CODE_FOR_nothing;
2478 rtx op0;
2479 tree fndecl = get_callee_fndecl (exp);
2480 machine_mode mode;
2481 tree arg;
2483 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2484 return NULL_RTX;
2486 arg = CALL_EXPR_ARG (exp, 0);
2487 icode = interclass_mathfn_icode (arg, fndecl);
2488 mode = TYPE_MODE (TREE_TYPE (arg));
2490 if (icode != CODE_FOR_nothing)
2492 class expand_operand ops[1];
2493 rtx_insn *last = get_last_insn ();
2494 tree orig_arg = arg;
2496 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2497 need to expand the argument again. This way, we will not perform
2498 side-effects more than once. */
2499 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2501 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2503 if (mode != GET_MODE (op0))
2504 op0 = convert_to_mode (mode, op0, 0);
2506 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2507 if (maybe_legitimize_operands (icode, 0, 1, ops)
2508 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2509 return ops[0].value;
2511 delete_insns_since (last);
2512 CALL_EXPR_ARG (exp, 0) = orig_arg;
2515 return NULL_RTX;
2518 /* Expand a call to the builtin sincos math function.
2519 Return NULL_RTX if a normal call should be emitted rather than expanding the
2520 function in-line. EXP is the expression that is a call to the builtin
2521 function. */
2523 static rtx
2524 expand_builtin_sincos (tree exp)
2526 rtx op0, op1, op2, target1, target2;
2527 machine_mode mode;
2528 tree arg, sinp, cosp;
2529 int result;
2530 location_t loc = EXPR_LOCATION (exp);
2531 tree alias_type, alias_off;
2533 if (!validate_arglist (exp, REAL_TYPE,
2534 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2535 return NULL_RTX;
2537 arg = CALL_EXPR_ARG (exp, 0);
2538 sinp = CALL_EXPR_ARG (exp, 1);
2539 cosp = CALL_EXPR_ARG (exp, 2);
2541 /* Make a suitable register to place result in. */
2542 mode = TYPE_MODE (TREE_TYPE (arg));
2544 /* Check if sincos insn is available, otherwise emit the call. */
2545 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2546 return NULL_RTX;
2548 target1 = gen_reg_rtx (mode);
2549 target2 = gen_reg_rtx (mode);
2551 op0 = expand_normal (arg);
2552 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2553 alias_off = build_int_cst (alias_type, 0);
2554 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2555 sinp, alias_off));
2556 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2557 cosp, alias_off));
2559 /* Compute into target1 and target2.
2560 Set TARGET to wherever the result comes back. */
2561 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2562 gcc_assert (result);
2564 /* Move target1 and target2 to the memory locations indicated
2565 by op1 and op2. */
2566 emit_move_insn (op1, target1);
2567 emit_move_insn (op2, target2);
2569 return const0_rtx;
2572 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2573 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2574 static rtx
2575 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2577 if (!validate_arglist (exp, VOID_TYPE))
2578 return NULL_RTX;
2580 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2581 if (icode == CODE_FOR_nothing)
2582 return NULL_RTX;
2584 if (target == 0
2585 || GET_MODE (target) != target_mode
2586 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2587 target = gen_reg_rtx (target_mode);
2589 rtx pat = GEN_FCN (icode) (target);
2590 if (!pat)
2591 return NULL_RTX;
2592 emit_insn (pat);
2594 return target;
2597 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2598 fenv.h), returning the result and setting it in TARGET. Otherwise return
2599 NULL_RTX on failure. */
2600 static rtx
2601 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2602 machine_mode target_mode, optab op_optab)
2604 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2605 return NULL_RTX;
2606 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2608 insn_code icode = direct_optab_handler (op_optab, SImode);
2609 if (icode == CODE_FOR_nothing)
2610 return NULL_RTX;
2612 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2613 return NULL_RTX;
2615 if (target == 0
2616 || GET_MODE (target) != target_mode
2617 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2618 target = gen_reg_rtx (target_mode);
2620 rtx pat = GEN_FCN (icode) (target, op0);
2621 if (!pat)
2622 return NULL_RTX;
2623 emit_insn (pat);
2625 return target;
2628 /* Expand a call to the internal cexpi builtin to the sincos math function.
2629 EXP is the expression that is a call to the builtin function; if convenient,
2630 the result should be placed in TARGET. */
2632 static rtx
2633 expand_builtin_cexpi (tree exp, rtx target)
2635 tree fndecl = get_callee_fndecl (exp);
2636 tree arg, type;
2637 machine_mode mode;
2638 rtx op0, op1, op2;
2639 location_t loc = EXPR_LOCATION (exp);
2641 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2642 return NULL_RTX;
2644 arg = CALL_EXPR_ARG (exp, 0);
2645 type = TREE_TYPE (arg);
2646 mode = TYPE_MODE (TREE_TYPE (arg));
2648 /* Try expanding via a sincos optab, fall back to emitting a libcall
2649 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2650 is only generated from sincos or cexp, or when either of them is available. */
2651 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2653 op1 = gen_reg_rtx (mode);
2654 op2 = gen_reg_rtx (mode);
2656 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2658 /* Compute into op1 and op2. */
2659 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2661 else if (targetm.libc_has_function (function_sincos, type))
2663 tree call, fn = NULL_TREE;
2664 tree top1, top2;
2665 rtx op1a, op2a;
2667 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2668 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2669 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2670 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2671 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2672 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2673 else
2674 gcc_unreachable ();
2676 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2677 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2678 op1a = copy_addr_to_reg (XEXP (op1, 0));
2679 op2a = copy_addr_to_reg (XEXP (op2, 0));
2680 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2681 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2683 /* Make sure not to fold the sincos call again. */
2684 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2685 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2686 call, 3, arg, top1, top2));
2688 else
2690 tree call, fn = NULL_TREE, narg;
2691 tree ctype = build_complex_type (type);
2693 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2694 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2695 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2696 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2697 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2698 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2699 else
2700 gcc_unreachable ();
2702 /* If we don't have a decl for cexp create one. This is the
2703 friendliest fallback if the user calls __builtin_cexpi
2704 on a target without full C99 function support. */
2705 if (fn == NULL_TREE)
2707 tree fntype;
2708 const char *name = NULL;
2710 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2711 name = "cexpf";
2712 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2713 name = "cexp";
2714 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2715 name = "cexpl";
2717 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2718 fn = build_fn_decl (name, fntype);
2721 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2722 build_real (type, dconst0), arg);
2724 /* Make sure not to fold the cexp call again. */
2725 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2726 return expand_expr (build_call_nary (ctype, call, 1, narg),
2727 target, VOIDmode, EXPAND_NORMAL);
2730 /* Now build the proper return type. */
2731 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2732 make_tree (TREE_TYPE (arg), op2),
2733 make_tree (TREE_TYPE (arg), op1)),
2734 target, VOIDmode, EXPAND_NORMAL);
2737 /* Conveniently construct a function call expression. FNDECL names the
2738 function to be called, N is the number of arguments, and the "..."
2739 parameters are the argument expressions. Unlike build_call_expr
2740 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2742 static tree
2743 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2745 va_list ap;
2746 tree fntype = TREE_TYPE (fndecl);
2747 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2749 va_start (ap, n);
2750 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2751 va_end (ap);
2752 SET_EXPR_LOCATION (fn, loc);
2753 return fn;
2756 /* Expand the __builtin_issignaling builtin. This needs to handle
2757 all floating point formats that do support NaNs (for those that
2758 don't it just sets target to 0). */
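/* For example, __builtin_issignaling (__builtin_nansf ("")) is expected
   to yield a nonzero value, whereas __builtin_issignaling (__builtin_nanf (""))
   and __builtin_issignaling (0.0f) yield zero.  */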
2760 static rtx
2761 expand_builtin_issignaling (tree exp, rtx target)
2763 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2764 return NULL_RTX;
2766 tree arg = CALL_EXPR_ARG (exp, 0);
2767 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2768 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2770 /* Expand the argument yielding a RTX expression. */
2771 rtx temp = expand_normal (arg);
2773 /* If mode doesn't support NaN, always return 0.
2774 Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2775 __builtin_issignaling working without -fsignaling-nans. Especially
2776 when -fno-signaling-nans is the default.
2777 On the other side, MODE_HAS_NANS (fmode) is unnecessary, with
2778 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2779 fold to 0 or non-NaN/Inf classification. */
2780 if (!HONOR_NANS (fmode))
2782 emit_move_insn (target, const0_rtx);
2783 return target;
2786 /* Check if the back end provides an insn that handles issignaling for the
2787 argument's mode. */
2788 enum insn_code icode = optab_handler (issignaling_optab, fmode);
2789 if (icode != CODE_FOR_nothing)
2791 rtx_insn *last = get_last_insn ();
2792 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2793 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2794 return this_target;
2795 delete_insns_since (last);
2798 if (DECIMAL_FLOAT_MODE_P (fmode))
2800 scalar_int_mode imode;
2801 rtx hi;
2802 switch (fmt->ieee_bits)
2804 case 32:
2805 case 64:
2806 imode = int_mode_for_mode (fmode).require ();
2807 temp = gen_lowpart (imode, temp);
2808 break;
2809 case 128:
2810 imode = int_mode_for_size (64, 1).require ();
2811 hi = NULL_RTX;
2812 /* For decimal128, TImode support isn't always there and even when
2813 it is, working on the DImode high part is usually better. */
2814 if (!MEM_P (temp))
2816 if (rtx t = simplify_gen_subreg (imode, temp, fmode,
2817 subreg_highpart_offset (imode,
2818 fmode)))
2819 hi = t;
2820 else
2822 scalar_int_mode imode2;
2823 if (int_mode_for_mode (fmode).exists (&imode2))
2825 rtx temp2 = gen_lowpart (imode2, temp);
2826 poly_uint64 off = subreg_highpart_offset (imode, imode2);
2827 if (rtx t = simplify_gen_subreg (imode, temp2,
2828 imode2, off))
2829 hi = t;
2832 if (!hi)
2834 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2835 emit_move_insn (mem, temp);
2836 temp = mem;
2839 if (!hi)
2841 poly_int64 offset
2842 = subreg_highpart_offset (imode, GET_MODE (temp));
2843 hi = adjust_address (temp, imode, offset);
2845 temp = hi;
2846 break;
2847 default:
2848 gcc_unreachable ();
2850 /* In all of decimal{32,64,128}, there is MSB sign bit and sNaN
2851 have 6 bits below it all set. */
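      /* For example, for decimal32 IMODE is a 32-bit integer mode, so VAL
	 below is 0x3f << 25 == 0x7e000000, and (TEMP & VAL) == VAL holds
	 exactly when the six bits below the sign bit are all set, i.e. for
	 signaling NaNs.  */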
2852 rtx val
2853 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2854 temp = expand_binop (imode, and_optab, temp, val,
2855 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2856 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2857 return temp;
2860 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2861 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2862 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2863 gcc_assert (MODE_COMPOSITE_P (fmode)
2864 || (fmt->pnan == fmt->p
2865 && fmt->signbit_ro == fmt->signbit_rw));
2867 switch (fmt->p)
2869 case 106: /* IBM double double */
2870 /* For IBM double double, recurse on the most significant double. */
2871 gcc_assert (MODE_COMPOSITE_P (fmode));
2872 temp = convert_modes (DFmode, fmode, temp, 0);
2873 fmode = DFmode;
2874 fmt = REAL_MODE_FORMAT (DFmode);
2875 /* FALLTHRU */
2876 case 8: /* bfloat */
2877 case 11: /* IEEE half */
2878 case 24: /* IEEE single */
2879 case 53: /* IEEE double or Intel extended with rounding to double */
2880 if (fmt->p == 53 && fmt->signbit_ro == 79)
2881 goto extended;
2883 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2884 temp = gen_lowpart (imode, temp);
2885 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2886 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2887 if (fmt->qnan_msb_set)
2889 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2890 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2891 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2892 ((temp ^ bit) & mask) > val. */
2893 temp = expand_binop (imode, xor_optab, temp, bit,
2894 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2895 temp = expand_binop (imode, and_optab, temp, mask,
2896 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2897 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2898 1, 1);
2900 else
2902 /* For MIPS/PA IEEE single/double, expand to:
2903 (temp & val) == val. */
2904 temp = expand_binop (imode, and_optab, temp, val,
2905 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2906 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2907 1, 1);
2910 break;
2911 case 113: /* IEEE quad */
2913 rtx hi = NULL_RTX, lo = NULL_RTX;
2914 scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2915 /* For IEEE quad, TImode support isn't always there and even when
2916 it is, working on DImode parts is usually better. */
2917 if (!MEM_P (temp))
2919 hi = simplify_gen_subreg (imode, temp, fmode,
2920 subreg_highpart_offset (imode, fmode));
2921 lo = simplify_gen_subreg (imode, temp, fmode,
2922 subreg_lowpart_offset (imode, fmode));
2923 if (!hi || !lo)
2925 scalar_int_mode imode2;
2926 if (int_mode_for_mode (fmode).exists (&imode2))
2928 rtx temp2 = gen_lowpart (imode2, temp);
2929 hi = simplify_gen_subreg (imode, temp2, imode2,
2930 subreg_highpart_offset (imode,
2931 imode2));
2932 lo = simplify_gen_subreg (imode, temp2, imode2,
2933 subreg_lowpart_offset (imode,
2934 imode2));
2937 if (!hi || !lo)
2939 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2940 emit_move_insn (mem, temp);
2941 temp = mem;
2944 if (!hi || !lo)
2946 poly_int64 offset
2947 = subreg_highpart_offset (imode, GET_MODE (temp));
2948 hi = adjust_address (temp, imode, offset);
2949 offset = subreg_lowpart_offset (imode, GET_MODE (temp));
2950 lo = adjust_address (temp, imode, offset);
2952 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
2953 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
2954 if (fmt->qnan_msb_set)
2956 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
2957 - 64)));
2958 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
2959 /* For non-MIPS/PA IEEE quad, expand to:
2960 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
2961 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
2962 lo = expand_binop (imode, ior_optab, lo, nlo,
2963 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2964 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
2965 temp = expand_binop (imode, xor_optab, hi, bit,
2966 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2967 temp = expand_binop (imode, ior_optab, temp, lo,
2968 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2969 temp = expand_binop (imode, and_optab, temp, mask,
2970 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2971 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2972 1, 1);
2974 else
2976 /* For MIPS/PA IEEE quad, expand to:
2977 (hi & val) == val. */
2978 temp = expand_binop (imode, and_optab, hi, val,
2979 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2980 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2981 1, 1);
2984 break;
2985 case 64: /* Intel or Motorola extended */
2986 extended:
2988 rtx ex, hi, lo;
2989 scalar_int_mode imode = int_mode_for_size (32, 1).require ();
2990 scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
2991 if (!MEM_P (temp))
2993 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2994 emit_move_insn (mem, temp);
2995 temp = mem;
2997 if (fmt->signbit_ro == 95)
2999 /* Motorola, always big endian, with 16-bit gap in between
3000 16-bit sign+exponent and 64-bit mantissa. */
3001 ex = adjust_address (temp, iemode, 0);
3002 hi = adjust_address (temp, imode, 4);
3003 lo = adjust_address (temp, imode, 8);
3005 else if (!WORDS_BIG_ENDIAN)
3007 /* Intel little endian, 64-bit mantissa followed by 16-bit
3008 sign+exponent and then either 16 or 48 bits of gap. */
3009 ex = adjust_address (temp, iemode, 8);
3010 hi = adjust_address (temp, imode, 4);
3011 lo = adjust_address (temp, imode, 0);
3013 else
3015 /* Big endian Itanium. */
3016 ex = adjust_address (temp, iemode, 0);
3017 hi = adjust_address (temp, imode, 2);
3018 lo = adjust_address (temp, imode, 6);
3020 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3021 gcc_assert (fmt->qnan_msb_set);
3022 rtx mask = GEN_INT (0x7fff);
3023 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3024 /* For Intel/Motorola extended format, expand to:
3025 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3026 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3027 lo = expand_binop (imode, ior_optab, lo, nlo,
3028 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3029 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3030 temp = expand_binop (imode, xor_optab, hi, bit,
3031 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3032 temp = expand_binop (imode, ior_optab, temp, lo,
3033 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3034 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3035 ex = expand_binop (iemode, and_optab, ex, mask,
3036 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3037 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3038 ex, mask, iemode, 1, 1);
3039 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3040 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3042 break;
3043 default:
3044 gcc_unreachable ();
3047 return temp;
3050 /* Expand a call to one of the builtin rounding functions gcc defines
3051 as an extension (lfloor and lceil). As these are gcc extensions we
3052 do not need to worry about setting errno to EDOM.
3053 If expanding via optab fails, lower expression to (int)(floor(x)).
3054 EXP is the expression that is a call to the builtin function;
3055 if convenient, the result should be placed in TARGET. */
3057 static rtx
3058 expand_builtin_int_roundingfn (tree exp, rtx target)
3060 convert_optab builtin_optab;
3061 rtx op0, tmp;
3062 rtx_insn *insns;
3063 tree fndecl = get_callee_fndecl (exp);
3064 enum built_in_function fallback_fn;
3065 tree fallback_fndecl;
3066 machine_mode mode;
3067 tree arg;
3069 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3070 return NULL_RTX;
3072 arg = CALL_EXPR_ARG (exp, 0);
3074 switch (DECL_FUNCTION_CODE (fndecl))
3076 CASE_FLT_FN (BUILT_IN_ICEIL):
3077 CASE_FLT_FN (BUILT_IN_LCEIL):
3078 CASE_FLT_FN (BUILT_IN_LLCEIL):
3079 builtin_optab = lceil_optab;
3080 fallback_fn = BUILT_IN_CEIL;
3081 break;
3083 CASE_FLT_FN (BUILT_IN_IFLOOR):
3084 CASE_FLT_FN (BUILT_IN_LFLOOR):
3085 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3086 builtin_optab = lfloor_optab;
3087 fallback_fn = BUILT_IN_FLOOR;
3088 break;
3090 default:
3091 gcc_unreachable ();
3094 /* Make a suitable register to place result in. */
3095 mode = TYPE_MODE (TREE_TYPE (exp));
3097 target = gen_reg_rtx (mode);
3099 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3100 need to expand the argument again. This way, we will not perform
3101 side-effects more than once. */
3102 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3104 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3106 start_sequence ();
3108 /* Compute into TARGET. */
3109 if (expand_sfix_optab (target, op0, builtin_optab))
3111 /* Output the entire sequence. */
3112 insns = get_insns ();
3113 end_sequence ();
3114 emit_insn (insns);
3115 return target;
3118 /* If we were unable to expand via the builtin, stop the sequence
3119 (without outputting the insns). */
3120 end_sequence ();
3122 /* Fall back to floating point rounding optab. */
3123 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3125 /* For non-C99 targets we may end up without a fallback fndecl here
3126 if the user called __builtin_lfloor directly. In this case emit
3127 a call to the floor/ceil variants nevertheless. This should result
3128 in the best user experience for targets without full C99 support. */
3129 if (fallback_fndecl == NULL_TREE)
3131 tree fntype;
3132 const char *name = NULL;
3134 switch (DECL_FUNCTION_CODE (fndecl))
3136 case BUILT_IN_ICEIL:
3137 case BUILT_IN_LCEIL:
3138 case BUILT_IN_LLCEIL:
3139 name = "ceil";
3140 break;
3141 case BUILT_IN_ICEILF:
3142 case BUILT_IN_LCEILF:
3143 case BUILT_IN_LLCEILF:
3144 name = "ceilf";
3145 break;
3146 case BUILT_IN_ICEILL:
3147 case BUILT_IN_LCEILL:
3148 case BUILT_IN_LLCEILL:
3149 name = "ceill";
3150 break;
3151 case BUILT_IN_IFLOOR:
3152 case BUILT_IN_LFLOOR:
3153 case BUILT_IN_LLFLOOR:
3154 name = "floor";
3155 break;
3156 case BUILT_IN_IFLOORF:
3157 case BUILT_IN_LFLOORF:
3158 case BUILT_IN_LLFLOORF:
3159 name = "floorf";
3160 break;
3161 case BUILT_IN_IFLOORL:
3162 case BUILT_IN_LFLOORL:
3163 case BUILT_IN_LLFLOORL:
3164 name = "floorl";
3165 break;
3166 default:
3167 gcc_unreachable ();
3170 fntype = build_function_type_list (TREE_TYPE (arg),
3171 TREE_TYPE (arg), NULL_TREE);
3172 fallback_fndecl = build_fn_decl (name, fntype);
3175 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3177 tmp = expand_normal (exp);
3178 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3180 /* Truncate the result of the floating point optab to an integer
3181 via expand_fix (). */
3182 target = gen_reg_rtx (mode);
3183 expand_fix (target, tmp, 0);
3185 return target;
3188 /* Expand a call to one of the builtin math functions doing integer
3189 conversion (lrint).
3190 Return 0 if a normal call should be emitted rather than expanding the
3191 function in-line. EXP is the expression that is a call to the builtin
3192 function; if convenient, the result should be placed in TARGET. */
3194 static rtx
3195 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3197 convert_optab builtin_optab;
3198 rtx op0;
3199 rtx_insn *insns;
3200 tree fndecl = get_callee_fndecl (exp);
3201 tree arg;
3202 machine_mode mode;
3203 enum built_in_function fallback_fn = BUILT_IN_NONE;
3205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3206 return NULL_RTX;
3208 arg = CALL_EXPR_ARG (exp, 0);
3210 switch (DECL_FUNCTION_CODE (fndecl))
3212 CASE_FLT_FN (BUILT_IN_IRINT):
3213 fallback_fn = BUILT_IN_LRINT;
3214 gcc_fallthrough ();
3215 CASE_FLT_FN (BUILT_IN_LRINT):
3216 CASE_FLT_FN (BUILT_IN_LLRINT):
3217 builtin_optab = lrint_optab;
3218 break;
3220 CASE_FLT_FN (BUILT_IN_IROUND):
3221 fallback_fn = BUILT_IN_LROUND;
3222 gcc_fallthrough ();
3223 CASE_FLT_FN (BUILT_IN_LROUND):
3224 CASE_FLT_FN (BUILT_IN_LLROUND):
3225 builtin_optab = lround_optab;
3226 break;
3228 default:
3229 gcc_unreachable ();
3232 /* There's no easy way to detect the case we need to set EDOM. */
3233 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3234 return NULL_RTX;
3236 /* Make a suitable register to place result in. */
3237 mode = TYPE_MODE (TREE_TYPE (exp));
3239 /* There's no easy way to detect the case we need to set EDOM. */
3240 if (!flag_errno_math)
3242 rtx result = gen_reg_rtx (mode);
3244 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3245 need to expand the argument again. This way, we will not perform
3246 side-effects more than once. */
3247 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3249 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3251 start_sequence ();
3253 if (expand_sfix_optab (result, op0, builtin_optab))
3255 /* Output the entire sequence. */
3256 insns = get_insns ();
3257 end_sequence ();
3258 emit_insn (insns);
3259 return result;
3262 /* If we were unable to expand via the builtin, stop the sequence
3263 (without outputting the insns) and call the library function
3264 with the stabilized argument list. */
3265 end_sequence ();
3268 if (fallback_fn != BUILT_IN_NONE)
3270 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3271 targets, (int) round (x) should never be transformed into
3272 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3273 a call to lround in the hope that the target provides at least some
3274 C99 functions. This should result in the best user experience for
3275 targets without full C99 support.
3276 As scalar float conversions with same mode are useless in GIMPLE,
3277 we can end up e.g. with a _Float32 argument passed to a float builtin;
3278 try to get the type from the builtin prototype first. */
3279 tree fallback_fndecl = NULL_TREE;
3280 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3281 fallback_fndecl
3282 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3283 as_combined_fn (fallback_fn), 0);
3284 if (fallback_fndecl == NULL_TREE)
3285 fallback_fndecl
3286 = mathfn_built_in_1 (TREE_TYPE (arg),
3287 as_combined_fn (fallback_fn), 0);
3288 if (fallback_fndecl)
3290 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3291 fallback_fndecl, 1, arg);
3293 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3294 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3295 return convert_to_mode (mode, target, 0);
3299 return expand_call (exp, target, target == const0_rtx);
3302 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3303 a normal call should be emitted rather than expanding the function
3304 in-line. EXP is the expression that is a call to the builtin
3305 function; if convenient, the result should be placed in TARGET. */
3307 static rtx
3308 expand_builtin_powi (tree exp, rtx target)
3310 tree arg0, arg1;
3311 rtx op0, op1;
3312 machine_mode mode;
3313 machine_mode mode2;
3315 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3316 return NULL_RTX;
3318 arg0 = CALL_EXPR_ARG (exp, 0);
3319 arg1 = CALL_EXPR_ARG (exp, 1);
3320 mode = TYPE_MODE (TREE_TYPE (exp));
3322 /* Emit a libcall to libgcc. */
3324 /* Mode of the 2nd argument must match that of an int. */
3325 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3327 if (target == NULL_RTX)
3328 target = gen_reg_rtx (mode);
3330 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3331 if (GET_MODE (op0) != mode)
3332 op0 = convert_to_mode (mode, op0, 0);
3333 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3334 if (GET_MODE (op1) != mode2)
3335 op1 = convert_to_mode (mode2, op1, 0);
3337 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3338 target, LCT_CONST, mode,
3339 op0, mode, op1, mode2);
3341 return target;
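/* For example, a call __builtin_powi (x, n) with double X is expanded by
   the function above into a call to the powi libfunc for DFmode
   (typically __powidf2 in libgcc); no inline expansion happens here.  */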
3344 /* Expand expression EXP which is a call to the strlen builtin. Return
3345 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3346 try to get the result in TARGET, if convenient. */
3348 static rtx
3349 expand_builtin_strlen (tree exp, rtx target,
3350 machine_mode target_mode)
3352 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3353 return NULL_RTX;
3355 tree src = CALL_EXPR_ARG (exp, 0);
3357 /* If the length can be computed at compile-time, return it. */
3358 if (tree len = c_strlen (src, 0))
3359 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3361 /* If the length can be computed at compile-time and is a constant
3362 integer, but there are side-effects in src, evaluate
3363 src for side-effects, then return len.
3364 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3365 can be optimized into: i++; x = 3; */
3366 tree len = c_strlen (src, 1);
3367 if (len && TREE_CODE (len) == INTEGER_CST)
3369 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3370 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3373 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3375 /* If SRC is not a pointer type, don't do this operation inline. */
3376 if (align == 0)
3377 return NULL_RTX;
3379 /* Bail out if we can't compute strlen in the right mode. */
3380 machine_mode insn_mode;
3381 enum insn_code icode = CODE_FOR_nothing;
3382 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3384 icode = optab_handler (strlen_optab, insn_mode);
3385 if (icode != CODE_FOR_nothing)
3386 break;
3388 if (insn_mode == VOIDmode)
3389 return NULL_RTX;
3391 /* Make a place to hold the source address. We will not expand
3392 the actual source until we are sure that the expansion will
3393 not fail -- there are trees that cannot be expanded twice. */
3394 rtx src_reg = gen_reg_rtx (Pmode);
3396 /* Mark the beginning of the strlen sequence so we can emit the
3397 source operand later. */
3398 rtx_insn *before_strlen = get_last_insn ();
3400 class expand_operand ops[4];
3401 create_output_operand (&ops[0], target, insn_mode);
3402 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3403 create_integer_operand (&ops[2], 0);
3404 create_integer_operand (&ops[3], align);
3405 if (!maybe_expand_insn (icode, 4, ops))
3406 return NULL_RTX;
3408 /* Check to see if the argument was declared attribute nonstring
3409 and if so, issue a warning since at this point it's not known
3410 to be nul-terminated. */
3411 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3413 /* Now that we are assured of success, expand the source. */
3414 start_sequence ();
3415 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3416 if (pat != src_reg)
3418 #ifdef POINTERS_EXTEND_UNSIGNED
3419 if (GET_MODE (pat) != Pmode)
3420 pat = convert_to_mode (Pmode, pat,
3421 POINTERS_EXTEND_UNSIGNED);
3422 #endif
3423 emit_move_insn (src_reg, pat);
3425 pat = get_insns ();
3426 end_sequence ();
3428 if (before_strlen)
3429 emit_insn_after (pat, before_strlen);
3430 else
3431 emit_insn_before (pat, get_insns ());
3433 /* Return the value in the proper mode for this function. */
3434 if (GET_MODE (ops[0].value) == target_mode)
3435 target = ops[0].value;
3436 else if (target != 0)
3437 convert_move (target, ops[0].value, 0);
3438 else
3439 target = convert_to_mode (target_mode, ops[0].value, 0);
3441 return target;
3444 /* Expand call EXP to the strnlen built-in, returning the result
3445 and setting it in TARGET if convenient; otherwise return NULL_RTX on failure. */
3447 static rtx
3448 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3450 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3451 return NULL_RTX;
3453 tree src = CALL_EXPR_ARG (exp, 0);
3454 tree bound = CALL_EXPR_ARG (exp, 1);
3456 if (!bound)
3457 return NULL_RTX;
3459 location_t loc = UNKNOWN_LOCATION;
3460 if (EXPR_HAS_LOCATION (exp))
3461 loc = EXPR_LOCATION (exp);
3463 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3464 so these conversions aren't necessary. */
3465 c_strlen_data lendata = { };
3466 tree len = c_strlen (src, 0, &lendata, 1);
3467 if (len)
3468 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3470 if (TREE_CODE (bound) == INTEGER_CST)
3472 if (!len)
3473 return NULL_RTX;
3475 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3476 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3479 if (TREE_CODE (bound) != SSA_NAME)
3480 return NULL_RTX;
3482 wide_int min, max;
3483 value_range r;
3484 get_global_range_query ()->range_of_expr (r, bound);
3485 if (r.varying_p () || r.undefined_p ())
3486 return NULL_RTX;
3487 min = r.lower_bound ();
3488 max = r.upper_bound ();
3490 if (!len || TREE_CODE (len) != INTEGER_CST)
3492 bool exact;
3493 lendata.decl = unterminated_array (src, &len, &exact);
3494 if (!lendata.decl)
3495 return NULL_RTX;
3498 if (lendata.decl)
3499 return NULL_RTX;
3501 if (wi::gtu_p (min, wi::to_wide (len)))
3502 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3504 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3505 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3508 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3509 bytes from the buffer at DATA + OFFSET and return them reinterpreted as
3510 a target constant. */
3512 static rtx
3513 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3514 fixed_size_mode mode)
3516 /* The REPresentation pointed to by DATA need not be a nul-terminated
3517 string but the caller guarantees it's large enough for MODE. */
3518 const char *rep = (const char *) data;
3520 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3523 /* LEN specifies the length of the block in a memcpy/memset operation.
3524 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3525 In some cases we can make a very likely guess on the max size, then we
3526 set it into PROBABLE_MAX_SIZE. */
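/* For instance (an illustrative sketch): for memcpy (a, b, n) with N a
   size_t SSA name and no range information, the type alone gives
   MIN_SIZE = 0 and MAX_SIZE = PROBABLE_MAX_SIZE = SIZE_MAX; if the
   global range query knows N is in [16, 64], MIN_SIZE becomes 16 and
   MAX_SIZE and PROBABLE_MAX_SIZE become 64. */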
3528 static void
3529 determine_block_size (tree len, rtx len_rtx,
3530 unsigned HOST_WIDE_INT *min_size,
3531 unsigned HOST_WIDE_INT *max_size,
3532 unsigned HOST_WIDE_INT *probable_max_size)
3534 if (CONST_INT_P (len_rtx))
3536 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3537 return;
3539 else
3541 wide_int min, max;
3542 enum value_range_kind range_type = VR_UNDEFINED;
3544 /* Determine bounds from the type. */
3545 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3546 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3547 else
3548 *min_size = 0;
3549 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3550 *probable_max_size = *max_size
3551 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3552 else
3553 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3555 if (TREE_CODE (len) == SSA_NAME)
3557 value_range r;
3558 tree tmin, tmax;
3559 get_global_range_query ()->range_of_expr (r, len);
3560 range_type = get_legacy_range (r, tmin, tmax);
3561 if (range_type != VR_UNDEFINED)
3563 min = wi::to_wide (tmin);
3564 max = wi::to_wide (tmax);
3567 if (range_type == VR_RANGE)
3569 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3570 *min_size = min.to_uhwi ();
3571 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3572 *probable_max_size = *max_size = max.to_uhwi ();
3574 else if (range_type == VR_ANTI_RANGE)
3576 /* Code like
3578 int n;
3579 if (n < 100)
3580 memcpy (a, b, n)
3582 produces an anti-range allowing negative values of N. We can
3583 still use that information and guess that N is not negative. */
3585 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3586 *probable_max_size = min.to_uhwi () - 1;
3589 gcc_checking_assert (*max_size <=
3590 (unsigned HOST_WIDE_INT)
3591 GET_MODE_MASK (GET_MODE (len_rtx)));
3594 /* Expand a call EXP to the memcpy builtin.
3595 Return NULL_RTX if we failed; the caller should emit a normal call,
3596 otherwise try to get the result in TARGET, if convenient (and in
3597 mode MODE if that's convenient). */
3599 static rtx
3600 expand_builtin_memcpy (tree exp, rtx target)
3602 if (!validate_arglist (exp,
3603 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3604 return NULL_RTX;
3606 tree dest = CALL_EXPR_ARG (exp, 0);
3607 tree src = CALL_EXPR_ARG (exp, 1);
3608 tree len = CALL_EXPR_ARG (exp, 2);
3610 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3611 /*retmode=*/ RETURN_BEGIN, false);
3614 /* Expand a call EXP to the memmove built-in. Return NULL_RTX if we failed;
3615 the caller should emit a normal call, otherwise try to get the result in TARGET, if convenient. */
3617 static rtx
3618 expand_builtin_memmove (tree exp, rtx target)
3620 if (!validate_arglist (exp,
3621 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3622 return NULL_RTX;
3624 tree dest = CALL_EXPR_ARG (exp, 0);
3625 tree src = CALL_EXPR_ARG (exp, 1);
3626 tree len = CALL_EXPR_ARG (exp, 2);
3628 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3629 /*retmode=*/ RETURN_BEGIN, true);
3632 /* Expand a call EXP to the mempcpy builtin.
3633 Return NULL_RTX if we failed; the caller should emit a normal call,
3634 otherwise try to get the result in TARGET, if convenient (and in
3635 mode MODE if that's convenient). */
3637 static rtx
3638 expand_builtin_mempcpy (tree exp, rtx target)
3640 if (!validate_arglist (exp,
3641 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3642 return NULL_RTX;
3644 tree dest = CALL_EXPR_ARG (exp, 0);
3645 tree src = CALL_EXPR_ARG (exp, 1);
3646 tree len = CALL_EXPR_ARG (exp, 2);
3648 /* Policy does not generally allow using compute_objsize (which
3649 is used internally by check_memop_size) to change code generation
3650 or drive optimization decisions.
3652 In this instance it is safe because the code we generate has
3653 the same semantics regardless of the return value of
3654 check_memop_sizes. Exactly the same amount of data is copied
3655 and the return value is exactly the same in both cases.
3657 Furthermore, check_memop_size always uses mode 0 for the call to
3658 compute_objsize, so the imprecise nature of compute_objsize is
3659 avoided. */
3661 /* Avoid expanding mempcpy into memcpy when the call is determined
3662 to overflow the buffer. This also prevents the same overflow
3663 from being diagnosed again when expanding memcpy. */
3665 return expand_builtin_mempcpy_args (dest, src, len,
3666 target, exp, /*retmode=*/ RETURN_END);
3669 /* Helper function to do the actual work of expanding the memory copy family
3670 of functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN bytes
3671 of memory from SRC to DEST and assign the result to TARGET if convenient.
3672 The return value is based on the RETMODE argument. */
3674 static rtx
3675 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3676 rtx target, tree exp, memop_ret retmode,
3677 bool might_overlap)
3679 unsigned int src_align = get_pointer_alignment (src);
3680 unsigned int dest_align = get_pointer_alignment (dest);
3681 rtx dest_mem, src_mem, dest_addr, len_rtx;
3682 HOST_WIDE_INT expected_size = -1;
3683 unsigned int expected_align = 0;
3684 unsigned HOST_WIDE_INT min_size;
3685 unsigned HOST_WIDE_INT max_size;
3686 unsigned HOST_WIDE_INT probable_max_size;
3688 bool is_move_done;
3690 /* If DEST is not a pointer type, call the normal function. */
3691 if (dest_align == 0)
3692 return NULL_RTX;
3694 /* If SRC is not a pointer type, don't do this
3695 operation in-line. */
3696 if (src_align == 0)
3697 return NULL_RTX;
3699 if (currently_expanding_gimple_stmt)
3700 stringop_block_profile (currently_expanding_gimple_stmt,
3701 &expected_align, &expected_size);
3703 if (expected_align < dest_align)
3704 expected_align = dest_align;
3705 dest_mem = get_memory_rtx (dest, len);
3706 set_mem_align (dest_mem, dest_align);
3707 len_rtx = expand_normal (len);
3708 determine_block_size (len, len_rtx, &min_size, &max_size,
3709 &probable_max_size);
3711 /* Try to get the byte representation of the constant SRC points to,
3712 with its byte size in NBYTES. */
3713 unsigned HOST_WIDE_INT nbytes;
3714 const char *rep = getbyterep (src, &nbytes);
3716 /* If the function's constant bound LEN_RTX is less than or equal
3717 to the byte size of the representation of the constant argument,
3718 and if block move would be done by pieces, we can avoid loading
3719 the bytes from memory and only store the computed constant.
3720 This works in the overlap (memmove) case as well because
3721 store_by_pieces just generates a series of stores of constants
3722 from the representation returned by getbyterep(). */
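/* For instance (an illustrative sketch): for memcpy (d, "abcdef", 4) the
   representation is "abcdef" with NBYTES == 7 and LEN_RTX is 4, so if the
   target can store 4 bytes by pieces, the bytes 'a', 'b', 'c', 'd' are
   stored directly instead of being loaded from the string literal. */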
3723 if (rep
3724 && CONST_INT_P (len_rtx)
3725 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3726 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3727 CONST_CAST (char *, rep),
3728 dest_align, false))
3730 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3731 builtin_memcpy_read_str,
3732 CONST_CAST (char *, rep),
3733 dest_align, false, retmode);
3734 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3735 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3736 return dest_mem;
3739 src_mem = get_memory_rtx (src, len);
3740 set_mem_align (src_mem, src_align);
3742 /* Copy word part most expediently. */
3743 enum block_op_methods method = BLOCK_OP_NORMAL;
3744 if (CALL_EXPR_TAILCALL (exp)
3745 && (retmode == RETURN_BEGIN || target == const0_rtx))
3746 method = BLOCK_OP_TAILCALL;
3747 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3748 && retmode == RETURN_END
3749 && !might_overlap
3750 && target != const0_rtx);
3751 if (use_mempcpy_call)
3752 method = BLOCK_OP_NO_LIBCALL_RET;
3753 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3754 expected_align, expected_size,
3755 min_size, max_size, probable_max_size,
3756 use_mempcpy_call, &is_move_done,
3757 might_overlap, tree_ctz (len));
3759 /* Bail out when a mempcpy call would be expanded as a libcall and
3760 the target provides a fast implementation
3761 of the mempcpy routine. */
3762 if (!is_move_done)
3763 return NULL_RTX;
3765 if (dest_addr == pc_rtx)
3766 return NULL_RTX;
3768 if (dest_addr == 0)
3770 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3771 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3774 if (retmode != RETURN_BEGIN && target != const0_rtx)
3776 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3777 /* stpcpy returns a pointer to the last byte copied (the NUL terminator). */
3778 if (retmode == RETURN_END_MINUS_ONE)
3779 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3782 return dest_addr;
3785 static rtx
3786 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3787 rtx target, tree orig_exp, memop_ret retmode)
3789 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3790 retmode, false);
3793 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3794 we failed, the caller should emit a normal call, otherwise try to
3795 get the result in TARGET, if convenient.
3796 Return value is based on RETMODE argument. */
3798 static rtx
3799 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3801 class expand_operand ops[3];
3802 rtx dest_mem;
3803 rtx src_mem;
3805 if (!targetm.have_movstr ())
3806 return NULL_RTX;
3808 dest_mem = get_memory_rtx (dest, NULL);
3809 src_mem = get_memory_rtx (src, NULL);
3810 if (retmode == RETURN_BEGIN)
3812 target = force_reg (Pmode, XEXP (dest_mem, 0));
3813 dest_mem = replace_equiv_address (dest_mem, target);
3816 create_output_operand (&ops[0],
3817 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3818 create_fixed_operand (&ops[1], dest_mem);
3819 create_fixed_operand (&ops[2], src_mem);
3820 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3821 return NULL_RTX;
3823 if (retmode != RETURN_BEGIN && target != const0_rtx)
3825 target = ops[0].value;
3826 /* movstr is supposed to set end to the address of the NUL
3827 terminator. If the caller requested a mempcpy-like return value,
3828 adjust it. */
3829 if (retmode == RETURN_END)
3831 rtx tem = plus_constant (GET_MODE (target),
3832 gen_lowpart (GET_MODE (target), target), 1);
3833 emit_move_insn (target, force_operand (tem, NULL_RTX));
3836 return target;
3839 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3840 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3841 try to get the result in TARGET, if convenient (and in mode MODE if that's
3842 convenient). */
3844 static rtx
3845 expand_builtin_strcpy (tree exp, rtx target)
3847 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3848 return NULL_RTX;
3850 tree dest = CALL_EXPR_ARG (exp, 0);
3851 tree src = CALL_EXPR_ARG (exp, 1);
3853 return expand_builtin_strcpy_args (exp, dest, src, target);
3856 /* Helper function to do the actual work for expand_builtin_strcpy. The
3857 arguments to the builtin_strcpy call DEST and SRC are broken out
3858 so that this can also be called without constructing an actual CALL_EXPR.
3859 The other arguments and return value are the same as for
3860 expand_builtin_strcpy. */
3862 static rtx
3863 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3865 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3868 /* Expand a call EXP to the stpcpy builtin.
3869 Return NULL_RTX if we failed; the caller should emit a normal call,
3870 otherwise try to get the result in TARGET, if convenient (and in
3871 mode MODE if that's convenient). */
3873 static rtx
3874 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3876 tree dst, src;
3877 location_t loc = EXPR_LOCATION (exp);
3879 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3880 return NULL_RTX;
3882 dst = CALL_EXPR_ARG (exp, 0);
3883 src = CALL_EXPR_ARG (exp, 1);
3885 /* If return value is ignored, transform stpcpy into strcpy. */
3886 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3888 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3889 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3890 return expand_expr (result, target, mode, EXPAND_NORMAL);
3892 else
3894 tree len, lenp1;
3895 rtx ret;
3897 /* Ensure we get an actual string whose length can be evaluated at
3898 compile-time, not an expression containing a string. This is
3899 because the latter will potentially produce pessimized code
3900 when used to produce the return value. */
3901 c_strlen_data lendata = { };
3902 if (!c_getstr (src)
3903 || !(len = c_strlen (src, 0, &lendata, 1)))
3904 return expand_movstr (dst, src, target,
3905 /*retmode=*/ RETURN_END_MINUS_ONE);
3907 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3908 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3909 target, exp,
3910 /*retmode=*/ RETURN_END_MINUS_ONE);
3912 if (ret)
3913 return ret;
3915 if (TREE_CODE (len) == INTEGER_CST)
3917 rtx len_rtx = expand_normal (len);
3919 if (CONST_INT_P (len_rtx))
3921 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3923 if (ret)
3925 if (! target)
3927 if (mode != VOIDmode)
3928 target = gen_reg_rtx (mode);
3929 else
3930 target = gen_reg_rtx (GET_MODE (ret));
3932 if (GET_MODE (target) != GET_MODE (ret))
3933 ret = gen_lowpart (GET_MODE (target), ret);
3935 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3936 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3937 gcc_assert (ret);
3939 return target;
3944 return expand_movstr (dst, src, target,
3945 /*retmode=*/ RETURN_END_MINUS_ONE);
3949 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3950 arguments while being careful to avoid duplicate warnings (which could
3951 be issued if the expander were to expand the call, resulting in it
3952 being emitted in expand_call()). */
3954 static rtx
3955 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3957 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3959 /* The call has been successfully expanded. Check for nonstring
3960 arguments and issue warnings as appropriate. */
3961 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3962 return ret;
3965 return NULL_RTX;
3968 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3969 bytes from the constant string DATA + OFFSET and return them as a target
3970 constant. */
3973 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3974 fixed_size_mode mode)
3976 const char *str = (const char *) data;
3978 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3979 return const0_rtx;
3981 return c_readstr (str + offset, mode);
3984 /* Helper to check the sizes of sequences and the destination of calls
3985 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3986 success (no overflow or invalid sizes), false otherwise. */
3988 static bool
3989 check_strncat_sizes (tree exp, tree objsize)
3991 tree dest = CALL_EXPR_ARG (exp, 0);
3992 tree src = CALL_EXPR_ARG (exp, 1);
3993 tree maxread = CALL_EXPR_ARG (exp, 2);
3995 /* Try to determine the range of lengths that the source expression
3996 refers to. */
3997 c_strlen_data lendata = { };
3998 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4000 /* Try to verify that the destination is big enough for the shortest
4001 string. */
4003 access_data data (nullptr, exp, access_read_write, maxread, true);
4004 if (!objsize && warn_stringop_overflow)
4006 /* If it hasn't been provided by __strncat_chk, try to determine
4007 the size of the destination object into which the source is
4008 being copied. */
4009 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4012 /* Add one for the terminating nul. */
4013 tree srclen = (lendata.minlen
4014 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4015 size_one_node)
4016 : NULL_TREE);
4018 /* The strncat function copies at most MAXREAD bytes and always appends
4019 the terminating nul so the specified upper bound should never be equal
4020 to (or greater than) the size of the destination. */
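/* For instance (an illustrative sketch): with char d[8], a call like
   strncat (d, s, sizeof d) is diagnosed here, since appending up to 8
   bytes plus the terminating NUL could overflow d. */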
4021 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4022 && tree_int_cst_equal (objsize, maxread))
4024 location_t loc = EXPR_LOCATION (exp);
4025 warning_at (loc, OPT_Wstringop_overflow_,
4026 "%qD specified bound %E equals destination size",
4027 get_callee_fndecl (exp), maxread);
4029 return false;
4032 if (!srclen
4033 || (maxread && tree_fits_uhwi_p (maxread)
4034 && tree_fits_uhwi_p (srclen)
4035 && tree_int_cst_lt (maxread, srclen)))
4036 srclen = maxread;
4038 /* The number of bytes to write is LEN but check_access will also
4039 check SRCLEN if LEN's value isn't known. */
4040 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4041 objsize, data.mode, &data);
4044 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4045 NULL_RTX if we failed; the caller should emit a normal call. */
4047 static rtx
4048 expand_builtin_strncpy (tree exp, rtx target)
4050 location_t loc = EXPR_LOCATION (exp);
4052 if (!validate_arglist (exp,
4053 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4054 return NULL_RTX;
4055 tree dest = CALL_EXPR_ARG (exp, 0);
4056 tree src = CALL_EXPR_ARG (exp, 1);
4057 /* The number of bytes to write (not the maximum). */
4058 tree len = CALL_EXPR_ARG (exp, 2);
4060 /* The length of the source sequence. */
4061 tree slen = c_strlen (src, 1);
4063 /* We must be passed a constant len and src parameter. */
4064 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4065 return NULL_RTX;
4067 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4069 /* We're required to pad with trailing zeros if the requested
4070 len is greater than strlen(s2)+1. In that case try to
4071 use store_by_pieces; if that fails, punt. */
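/* For instance (an illustrative sketch): strncpy (d, "ab", 6) stores the
   bytes 'a', 'b' followed by four NUL bytes, so the whole six-byte block
   can be emitted with store_by_pieces from the padded constant. */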
4072 if (tree_int_cst_lt (slen, len))
4074 unsigned int dest_align = get_pointer_alignment (dest);
4075 const char *p = c_getstr (src);
4076 rtx dest_mem;
4078 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4079 || !can_store_by_pieces (tree_to_uhwi (len),
4080 builtin_strncpy_read_str,
4081 CONST_CAST (char *, p),
4082 dest_align, false))
4083 return NULL_RTX;
4085 dest_mem = get_memory_rtx (dest, len);
4086 store_by_pieces (dest_mem, tree_to_uhwi (len),
4087 builtin_strncpy_read_str,
4088 CONST_CAST (char *, p), dest_align, false,
4089 RETURN_BEGIN);
4090 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4091 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4092 return dest_mem;
4095 return NULL_RTX;
4098 /* Return the RTL of a register in MODE generated from PREV in the
4099 previous iteration. */
4101 static rtx
4102 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4104 rtx target = nullptr;
4105 if (prev != nullptr && prev->data != nullptr)
4107 /* Use the previous data in the same mode. */
4108 if (prev->mode == mode)
4109 return prev->data;
4111 fixed_size_mode prev_mode = prev->mode;
4113 /* Don't use the previous data to write QImode if it is in a
4114 vector mode. */
4115 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4116 return target;
4118 rtx prev_rtx = prev->data;
4120 if (REG_P (prev_rtx)
4121 && HARD_REGISTER_P (prev_rtx)
4122 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4124 /* This case occurs when PREV_MODE is a vector and when
4125 MODE is too small to store using vector operations.
4126 After register allocation, the code will need to move the
4127 lowpart of the vector register into a non-vector register.
4129 Also, the target has chosen to use a hard register
4130 instead of going with the default choice of using a
4131 pseudo register. We should respect that choice and try to
4132 avoid creating a pseudo register with the same mode as the
4133 current hard register.
4135 In principle, we could just use a lowpart MODE subreg of
4136 the vector register. However, the vector register mode might
4137 be too wide for non-vector registers, and we already know
4138 that the non-vector mode is too small for vector registers.
4139 It's therefore likely that we'd need to spill to memory in
4140 the vector mode and reload the non-vector value from there.
4142 Try to avoid that by reducing the vector register to the
4143 smallest size that it can hold. This should increase the
4144 chances that non-vector registers can hold both the inner
4145 and outer modes of the subreg that we generate later. */
4146 machine_mode m;
4147 fixed_size_mode candidate;
4148 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4149 if (is_a<fixed_size_mode> (m, &candidate))
4151 if (GET_MODE_SIZE (candidate)
4152 >= GET_MODE_SIZE (prev_mode))
4153 break;
4154 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4155 && lowpart_subreg_regno (REGNO (prev_rtx),
4156 prev_mode, candidate) >= 0)
4158 target = lowpart_subreg (candidate, prev_rtx,
4159 prev_mode);
4160 prev_rtx = target;
4161 prev_mode = candidate;
4162 break;
4165 if (target == nullptr)
4166 prev_rtx = copy_to_reg (prev_rtx);
4169 target = lowpart_subreg (mode, prev_rtx, prev_mode);
4171 return target;
4174 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4175 bytes from the constant string DATA + OFFSET and return them as a target
4176 constant. If PREV isn't nullptr, it has the RTL info from the
4177 previous iteration. */
4180 builtin_memset_read_str (void *data, void *prev,
4181 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4182 fixed_size_mode mode)
4184 const char *c = (const char *) data;
4185 unsigned int size = GET_MODE_SIZE (mode);
4187 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4188 mode);
4189 if (target != nullptr)
4190 return target;
4191 rtx src = gen_int_mode (*c, QImode);
4193 if (VECTOR_MODE_P (mode))
4195 gcc_assert (GET_MODE_INNER (mode) == QImode);
4197 rtx const_vec = gen_const_vec_duplicate (mode, src);
4198 if (prev == NULL)
4199 /* Return CONST_VECTOR when called by a query function. */
4200 return const_vec;
4202 /* Use the move expander with CONST_VECTOR. */
4203 target = gen_reg_rtx (mode);
4204 emit_move_insn (target, const_vec);
4205 return target;
4208 char *p = XALLOCAVEC (char, size);
4210 memset (p, *c, size);
4212 return c_readstr (p, mode);
4215 /* Callback routine for store_by_pieces. Return the RTL of a register
4216 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4217 char value given in the RTL register data. For example, if mode is
4218 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4219 nullptr, it has the RTL info from the previous iteration. */
4221 static rtx
4222 builtin_memset_gen_str (void *data, void *prev,
4223 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4224 fixed_size_mode mode)
4226 rtx target, coeff;
4227 size_t size;
4228 char *p;
4230 size = GET_MODE_SIZE (mode);
4231 if (size == 1)
4232 return (rtx) data;
4234 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4235 if (target != nullptr)
4236 return target;
4238 if (VECTOR_MODE_P (mode))
4240 gcc_assert (GET_MODE_INNER (mode) == QImode);
4242 /* Support for vec_duplicate_optab is a precondition for picking a vector
4243 mode for the memset expander. */
4244 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4246 target = gen_reg_rtx (mode);
4247 class expand_operand ops[2];
4248 create_output_operand (&ops[0], target, mode);
4249 create_input_operand (&ops[1], (rtx) data, QImode);
4250 expand_insn (icode, 2, ops);
4251 if (!rtx_equal_p (target, ops[0].value))
4252 emit_move_insn (target, ops[0].value);
4254 return target;
4257 p = XALLOCAVEC (char, size);
4258 memset (p, 1, size);
4259 coeff = c_readstr (p, mode);
4261 target = convert_to_mode (mode, (rtx) data, 1);
4262 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4263 return force_reg (mode, target);
4266 /* Expand expression EXP, which is a call to the memset builtin. Return
4267 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4268 try to get the result in TARGET, if convenient (and in mode MODE if that's
4269 convenient). */
4272 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4274 if (!validate_arglist (exp,
4275 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4276 return NULL_RTX;
4278 tree dest = CALL_EXPR_ARG (exp, 0);
4279 tree val = CALL_EXPR_ARG (exp, 1);
4280 tree len = CALL_EXPR_ARG (exp, 2);
4282 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4285 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4286 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4287 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4288 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4290 The strategy is to issue one store_by_pieces for each power of two,
4291 from most to least significant, guarded by a test on whether there
4292 are at least that many bytes left to copy in LEN.
4294 ??? Should we skip some powers of two in favor of loops? Maybe start
4295 at the max of TO/LEN/word alignment, at least when optimizing for
4296 size, instead of ensuring O(log len) dynamic compares? */
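/* As an illustrative sketch of the emitted structure (not a literal
   transcript): with LEN known to be in [0, 15], CTZ_LEN == 0 and no
   initial BLKSIZE block, the expansion is roughly

     if (rem >= 8) { store 8 bytes; ptr += 8; rem -= 8; }
     if (rem >= 4) { store 4 bytes; ptr += 4; rem -= 4; }
     if (rem >= 2) { store 2 bytes; ptr += 2; rem -= 2; }
     if (rem >= 1) { store 1 byte; }

   where each store is a store_by_pieces sequence. */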
4298 bool
4299 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4300 unsigned HOST_WIDE_INT min_len,
4301 unsigned HOST_WIDE_INT max_len,
4302 rtx val, char valc, unsigned int align)
4304 int max_bits = floor_log2 (max_len);
4305 int min_bits = floor_log2 (min_len);
4306 int sctz_len = ctz_len;
4308 gcc_checking_assert (sctz_len >= 0);
4310 if (val)
4311 valc = 1;
4313 /* Bits more significant than TST_BITS are part of the shared prefix
4314 in the binary representation of both min_len and max_len. Since
4315 they're identical, we don't need to test them in the loop. */
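/* E.g. (illustrative): MIN_LEN == 9 (0b1001) and MAX_LEN == 13 (0b1101)
   differ only in bits 2 and below, so TST_BITS is 2 and bit 3, which they
   share, need not be retested inside the loop. */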
4316 int tst_bits = (max_bits != min_bits ? max_bits
4317 : floor_log2 (max_len ^ min_len));
4319 /* Save the pre-blksize values. */
4320 int orig_max_bits = max_bits;
4321 int orig_tst_bits = tst_bits;
4323 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4324 bytes, to lower max_bits. In the unlikely case of a constant LEN
4325 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4326 single store_by_pieces, but otherwise, select the minimum multiple
4327 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
4328 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4329 unsigned HOST_WIDE_INT blksize;
4330 if (max_len > min_len)
4332 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4333 align / BITS_PER_UNIT);
4334 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4335 blksize &= ~(alrng - 1);
4337 else if (max_len == min_len)
4338 blksize = max_len;
4339 else
4340 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4341 return false;
4342 if (min_len >= blksize)
4344 min_len -= blksize;
4345 min_bits = floor_log2 (min_len);
4346 max_len -= blksize;
4347 max_bits = floor_log2 (max_len);
4349 tst_bits = (max_bits != min_bits ? max_bits
4350 : floor_log2 (max_len ^ min_len));
4352 else
4353 blksize = 0;
4355 /* Check that we can use store by pieces for the maximum store count
4356 we may issue (initial fixed-size block, plus conditional
4357 power-of-two-sized stores from max_bits to ctz_len). */
4358 unsigned HOST_WIDE_INT xlenest = blksize;
4359 if (max_bits >= 0)
4360 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4361 - (HOST_WIDE_INT_1U << ctz_len));
4362 bool max_loop = false;
4363 bool use_store_by_pieces = true;
4364 /* Skip the test in case of overflow in xlenest. It shouldn't
4365 happen because of the way max_bits and blksize are related, but
4366 it doesn't hurt to test. */
4367 if (blksize > xlenest
4368 || !can_store_by_pieces (xlenest, builtin_memset_read_str,
4369 &valc, align, true))
4371 if (!(flag_inline_stringops & ILSOP_MEMSET))
4372 return false;
4374 for (max_bits = orig_max_bits;
4375 max_bits >= sctz_len;
4376 --max_bits)
4378 xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4379 - (HOST_WIDE_INT_1U << ctz_len));
4380 /* Check that blksize plus the bits to be stored as blocks
4381 sized at powers of two can be stored by pieces. This is
4382 like the test above, but with smaller max_bits. Skip
4383 orig_max_bits (it would be redundant). Also skip in case
4384 of overflow. */
4385 if (max_bits < orig_max_bits
4386 && xlenest + blksize >= xlenest
4387 && can_store_by_pieces (xlenest + blksize,
4388 builtin_memset_read_str,
4389 &valc, align, true))
4391 max_loop = true;
4392 break;
4394 if (blksize
4395 && can_store_by_pieces (xlenest,
4396 builtin_memset_read_str,
4397 &valc, align, true))
4399 max_len += blksize;
4400 min_len += blksize;
4401 tst_bits = orig_tst_bits;
4402 blksize = 0;
4403 max_loop = true;
4404 break;
4406 if (max_bits == sctz_len)
4408 /* We'll get here if can_store_by_pieces refuses to
4409 store even a single QImode. We'll fall back to
4410 QImode stores then. */
4411 if (!sctz_len)
4413 blksize = 0;
4414 max_loop = true;
4415 use_store_by_pieces = false;
4416 break;
4418 --sctz_len;
4419 --ctz_len;
4422 if (!max_loop)
4423 return false;
4424 /* If the boundaries are such that min and max may run a
4425 different number of trips in the initial loop, the remainder
4426 need not be between the moduli, so set tst_bits to cover all
4427 bits. Otherwise, if the trip counts are the same, max_len
4428 has the common prefix, and the previously-computed tst_bits
4429 is usable. */
4430 if (max_len >> max_bits > min_len >> max_bits)
4431 tst_bits = max_bits;
4433 /* ??? Do we have to check that all powers of two lengths from
4434 max_bits down to ctz_len pass can_store_by_pieces? As in, could
4435 it possibly be that xlenest passes while smaller power-of-two
4436 sizes don't? */
4438 by_pieces_constfn constfun;
4439 void *constfundata;
4440 if (val)
4442 constfun = builtin_memset_gen_str;
4443 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4444 val);
4446 else
4448 constfun = builtin_memset_read_str;
4449 constfundata = &valc;
4452 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4453 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4454 to = replace_equiv_address (to, ptr);
4455 set_mem_align (to, align);
4457 if (blksize)
4459 to = store_by_pieces (to, blksize,
4460 constfun, constfundata,
4461 align, true,
4462 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4463 if (max_len == 0)
4464 return true;
4466 /* Adjust PTR, TO and REM. Since TO's address is likely
4467 PTR+offset, we have to replace it. */
4468 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4469 to = replace_equiv_address (to, ptr);
4470 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4471 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4474 /* Iterate over power-of-two block sizes from the maximum length to
4475 the least significant bit possibly set in the length. */
4476 for (int i = max_bits; i >= sctz_len; i--)
4478 rtx_code_label *loop_label = NULL;
4479 rtx_code_label *label = NULL;
4481 blksize = HOST_WIDE_INT_1U << i;
4483 /* If we're past the bits shared between min_ and max_len, expand
4484 a test on the dynamic length, comparing it with the
4485 BLKSIZE. */
4486 if (i <= tst_bits)
4488 label = gen_label_rtx ();
4489 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4490 ptr_mode, 1, label,
4491 profile_probability::even ());
4493 /* If we are at a bit that is in the prefix shared by min_ and
4494 max_len, skip the current BLKSIZE if the bit is clear, but do
4495 not skip the loop, even if it doesn't require
4496 prechecking. */
4497 else if ((max_len & blksize) == 0
4498 && !(max_loop && i == max_bits))
4499 continue;
4501 if (max_loop && i == max_bits)
4503 loop_label = gen_label_rtx ();
4504 emit_label (loop_label);
4505 /* Since we may run this multiple times, don't assume we
4506 know anything about the offset. */
4507 clear_mem_offset (to);
4510 bool update_needed = i != sctz_len || loop_label;
4511 rtx next_ptr = NULL_RTX;
4512 if (!use_store_by_pieces)
4514 gcc_checking_assert (blksize == 1);
4515 if (!val)
4516 val = gen_int_mode (valc, QImode);
4517 to = change_address (to, QImode, 0);
4518 emit_move_insn (to, val);
4519 if (update_needed)
4520 next_ptr = plus_constant (ptr_mode, ptr, blksize);
4522 else
4524 /* Issue a store of BLKSIZE bytes. */
4525 to = store_by_pieces (to, blksize,
4526 constfun, constfundata,
4527 align, true,
4528 update_needed ? RETURN_END : RETURN_BEGIN);
4529 next_ptr = XEXP (to, 0);
4531 /* Adjust REM and PTR, unless this is the last iteration. */
4532 if (update_needed)
4534 emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4535 to = replace_equiv_address (to, ptr);
4536 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4537 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4540 if (loop_label)
4541 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4542 ptr_mode, 1, loop_label,
4543 profile_probability::likely ());
4545 if (label)
4547 emit_label (label);
4549 /* Given conditional stores, the offset can no longer be
4550 known, so clear it. */
4551 clear_mem_offset (to);
4555 return true;
4558 /* Helper function to do the actual work for expand_builtin_memset. The
4559 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4560 so that this can also be called without constructing an actual CALL_EXPR.
4561 The other arguments and return value are the same as for
4562 expand_builtin_memset. */
4564 static rtx
4565 expand_builtin_memset_args (tree dest, tree val, tree len,
4566 rtx target, machine_mode mode, tree orig_exp)
4568 tree fndecl, fn;
4569 enum built_in_function fcode;
4570 machine_mode val_mode;
4571 char c;
4572 unsigned int dest_align;
4573 rtx dest_mem, dest_addr, len_rtx;
4574 HOST_WIDE_INT expected_size = -1;
4575 unsigned int expected_align = 0;
4576 unsigned HOST_WIDE_INT min_size;
4577 unsigned HOST_WIDE_INT max_size;
4578 unsigned HOST_WIDE_INT probable_max_size;
4580 dest_align = get_pointer_alignment (dest);
4582 /* If DEST is not a pointer type, don't do this operation in-line. */
4583 if (dest_align == 0)
4584 return NULL_RTX;
4586 if (currently_expanding_gimple_stmt)
4587 stringop_block_profile (currently_expanding_gimple_stmt,
4588 &expected_align, &expected_size);
4590 if (expected_align < dest_align)
4591 expected_align = dest_align;
4593 /* If the LEN parameter is zero, return DEST. */
4594 if (integer_zerop (len))
4596 /* Evaluate and ignore VAL in case it has side-effects. */
4597 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4598 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4601 /* Stabilize the arguments in case we fail. */
4602 dest = builtin_save_expr (dest);
4603 val = builtin_save_expr (val);
4604 len = builtin_save_expr (len);
4606 len_rtx = expand_normal (len);
4607 determine_block_size (len, len_rtx, &min_size, &max_size,
4608 &probable_max_size);
4609 dest_mem = get_memory_rtx (dest, len);
4610 val_mode = TYPE_MODE (unsigned_char_type_node);
4612 if (TREE_CODE (val) != INTEGER_CST
4613 || target_char_cast (val, &c))
4615 rtx val_rtx;
4617 val_rtx = expand_normal (val);
4618 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4620 /* Assume that we can memset by pieces if we can store
4621 the coefficients by pieces (in the required modes).
4622 We can't pass builtin_memset_gen_str as that emits RTL. */
4623 c = 1;
4624 if (tree_fits_uhwi_p (len)
4625 && can_store_by_pieces (tree_to_uhwi (len),
4626 builtin_memset_read_str, &c, dest_align,
4627 true))
4629 val_rtx = force_reg (val_mode, val_rtx);
4630 store_by_pieces (dest_mem, tree_to_uhwi (len),
4631 builtin_memset_gen_str, val_rtx, dest_align,
4632 true, RETURN_BEGIN);
4634 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4635 dest_align, expected_align,
4636 expected_size, min_size, max_size,
4637 probable_max_size)
4638 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4639 tree_ctz (len),
4640 min_size, max_size,
4641 val_rtx, 0,
4642 dest_align))
4643 goto do_libcall;
4645 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4646 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4647 return dest_mem;
4650 if (c)
4652 if (tree_fits_uhwi_p (len)
4653 && can_store_by_pieces (tree_to_uhwi (len),
4654 builtin_memset_read_str, &c, dest_align,
4655 true))
4656 store_by_pieces (dest_mem, tree_to_uhwi (len),
4657 builtin_memset_read_str, &c, dest_align, true,
4658 RETURN_BEGIN);
4659 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4660 gen_int_mode (c, val_mode),
4661 dest_align, expected_align,
4662 expected_size, min_size, max_size,
4663 probable_max_size)
4664 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4665 tree_ctz (len),
4666 min_size, max_size,
4667 NULL_RTX, c,
4668 dest_align))
4669 goto do_libcall;
4671 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4672 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4673 return dest_mem;
4676 set_mem_align (dest_mem, dest_align);
4677 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4678 CALL_EXPR_TAILCALL (orig_exp)
4679 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4680 expected_align, expected_size,
4681 min_size, max_size,
4682 probable_max_size, tree_ctz (len));
4684 if (dest_addr == 0)
4686 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4687 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4690 return dest_addr;
4692 do_libcall:
4693 fndecl = get_callee_fndecl (orig_exp);
4694 fcode = DECL_FUNCTION_CODE (fndecl);
4695 if (fcode == BUILT_IN_MEMSET)
4696 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4697 dest, val, len);
4698 else if (fcode == BUILT_IN_BZERO)
4699 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4700 dest, len);
4701 else
4702 gcc_unreachable ();
4703 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4704 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4705 return expand_call (fn, target, target == const0_rtx);
4708 /* Expand expression EXP, which is a call to the bzero builtin. Return
4709 NULL_RTX if we failed; the caller should emit a normal call. */
4711 static rtx
4712 expand_builtin_bzero (tree exp)
4714 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4715 return NULL_RTX;
4717 tree dest = CALL_EXPR_ARG (exp, 0);
4718 tree size = CALL_EXPR_ARG (exp, 1);
4720 /* New argument list transforming bzero(ptr x, int y) to
4721 memset(ptr x, int 0, size_t y). This is done this way
4722 so that if it isn't expanded inline, we fall back to
4723 calling bzero instead of memset. */
4725 location_t loc = EXPR_LOCATION (exp);
4727 return expand_builtin_memset_args (dest, integer_zero_node,
4728 fold_convert_loc (loc,
4729 size_type_node, size),
4730 const0_rtx, VOIDmode, exp);
4733 /* Try to expand cmpstr operation ICODE with the given operands.
4734 Return the result rtx on success, otherwise return null. */
4736 static rtx
4737 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4738 HOST_WIDE_INT align)
4740 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4742 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4743 target = NULL_RTX;
4745 class expand_operand ops[4];
4746 create_output_operand (&ops[0], target, insn_mode);
4747 create_fixed_operand (&ops[1], arg1_rtx);
4748 create_fixed_operand (&ops[2], arg2_rtx);
4749 create_integer_operand (&ops[3], align);
4750 if (maybe_expand_insn (icode, 4, ops))
4751 return ops[0].value;
4752 return NULL_RTX;
4755 /* Expand expression EXP, which is a call to the memcmp built-in function.
4756 Return NULL_RTX if we failed and the caller should emit a normal call,
4757 otherwise try to get the result in TARGET, if convenient.
4758 RESULT_EQ is true if we can relax the returned value to be either zero
4759 or nonzero, without caring about the sign. */
4761 static rtx
4762 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4764 if (!validate_arglist (exp,
4765 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4766 return NULL_RTX;
4768 tree arg1 = CALL_EXPR_ARG (exp, 0);
4769 tree arg2 = CALL_EXPR_ARG (exp, 1);
4770 tree len = CALL_EXPR_ARG (exp, 2);
4772 /* Due to the performance benefit, always inline the calls first
4773 when result_eq is false. */
4774 rtx result = NULL_RTX;
4775 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4776 if (!result_eq && fcode != BUILT_IN_BCMP)
4778 result = inline_expand_builtin_bytecmp (exp, target);
4779 if (result)
4780 return result;
4783 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4784 location_t loc = EXPR_LOCATION (exp);
4786 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4787 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4789 /* If we don't have POINTER_TYPE, call the function. */
4790 if (arg1_align == 0 || arg2_align == 0)
4791 return NULL_RTX;
4793 rtx arg1_rtx = get_memory_rtx (arg1, len);
4794 rtx arg2_rtx = get_memory_rtx (arg2, len);
4795 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4797 /* Set MEM_SIZE as appropriate. */
4798 if (CONST_INT_P (len_rtx))
4800 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4801 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4804 by_pieces_constfn constfn = NULL;
4806 /* Try to get the byte representation of the constant ARG2 (or, only
4807 when the function's result is used for equality to zero, ARG1)
4808 points to, with its byte size in NBYTES. */
4809 unsigned HOST_WIDE_INT nbytes;
4810 const char *rep = getbyterep (arg2, &nbytes);
4811 if (result_eq && rep == NULL)
4813 /* For equality to zero the arguments are interchangeable. */
4814 rep = getbyterep (arg1, &nbytes);
4815 if (rep != NULL)
4816 std::swap (arg1_rtx, arg2_rtx);
4819 /* If the function's constant bound LEN_RTX is less than or equal
4820 to the byte size of the representation of the constant argument,
4821 and if the block comparison would be done by pieces, we can avoid loading
4822 the constant bytes from memory and compare against them directly. */
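/* For instance (an illustrative sketch): for memcmp (p, "abcd", 4) the
   four constant bytes are fed to the comparison directly, so only P's
   bytes need to be loaded at run time. */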
4823 if (rep
4824 && CONST_INT_P (len_rtx)
4825 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4826 constfn = builtin_memcpy_read_str;
4828 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4829 TREE_TYPE (len), target,
4830 result_eq, constfn,
4831 CONST_CAST (char *, rep),
4832 tree_ctz (len));
4834 if (result)
4836 /* Return the value in the proper mode for this function. */
4837 if (GET_MODE (result) == mode)
4838 return result;
4840 if (target != 0)
4842 convert_move (target, result, 0);
4843 return target;
4846 return convert_to_mode (mode, result, 0);
4849 return NULL_RTX;
4852 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4853 if we failed; the caller should emit a normal call, otherwise try to get
4854 the result in TARGET, if convenient. */
4856 static rtx
4857 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4859 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4860 return NULL_RTX;
4862 tree arg1 = CALL_EXPR_ARG (exp, 0);
4863 tree arg2 = CALL_EXPR_ARG (exp, 1);
4865 /* Due to the performance benefit, always inline the calls first. */
4866 rtx result = NULL_RTX;
4867 result = inline_expand_builtin_bytecmp (exp, target);
4868 if (result)
4869 return result;
4871 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4872 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4873 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4874 return NULL_RTX;
4876 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4877 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4879 /* If we don't have POINTER_TYPE, call the function. */
4880 if (arg1_align == 0 || arg2_align == 0)
4881 return NULL_RTX;
4883 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4884 arg1 = builtin_save_expr (arg1);
4885 arg2 = builtin_save_expr (arg2);
4887 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4888 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4890 /* Try to call cmpstrsi. */
4891 if (cmpstr_icode != CODE_FOR_nothing)
4892 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4893 MIN (arg1_align, arg2_align));
4895 /* Try to determine at least one length and call cmpstrnsi. */
4896 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4898 tree len;
4899 rtx arg3_rtx;
4901 tree len1 = c_strlen (arg1, 1);
4902 tree len2 = c_strlen (arg2, 1);
4904 if (len1)
4905 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4906 if (len2)
4907 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4909 /* If we don't have a constant length for the first, use the length
4910 of the second, if we know it. We don't require a constant for
4911 this case; some cost analysis could be done if both are available
4912 but neither is constant. For now, assume they're equally cheap,
4913 unless one has side effects. If both strings have constant lengths,
4914 use the smaller. */
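/* E.g. (illustrative): for strcmp (x, "hi") only len2 is known; the
   comparison can stop after at most strlen ("hi") + 1 == 3 bytes, so
   cmpstrnsi can be used with length 3. */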
4916 if (!len1)
4917 len = len2;
4918 else if (!len2)
4919 len = len1;
4920 else if (TREE_SIDE_EFFECTS (len1))
4921 len = len2;
4922 else if (TREE_SIDE_EFFECTS (len2))
4923 len = len1;
4924 else if (TREE_CODE (len1) != INTEGER_CST)
4925 len = len2;
4926 else if (TREE_CODE (len2) != INTEGER_CST)
4927 len = len1;
4928 else if (tree_int_cst_lt (len1, len2))
4929 len = len1;
4930 else
4931 len = len2;
4933 /* If both arguments have side effects, we cannot optimize. */
4934 if (len && !TREE_SIDE_EFFECTS (len))
4936 arg3_rtx = expand_normal (len);
4937 result = expand_cmpstrn_or_cmpmem
4938 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4939 arg3_rtx, MIN (arg1_align, arg2_align));
4943 tree fndecl = get_callee_fndecl (exp);
4944 if (result)
4946 /* Return the value in the proper mode for this function. */
4947 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4948 if (GET_MODE (result) == mode)
4949 return result;
4950 if (target == 0)
4951 return convert_to_mode (mode, result, 0);
4952 convert_move (target, result, 0);
4953 return target;
4956 /* Expand the library call ourselves using a stabilized argument
4957 list to avoid re-evaluating the function's arguments twice. */
4958 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4959 copy_warning (fn, exp);
4960 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4961 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4962 return expand_call (fn, target, target == const0_rtx);
4965 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4966 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4967 try to get the result in TARGET, if convenient. */
4969 static rtx
4970 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4971 ATTRIBUTE_UNUSED machine_mode mode)
4973 if (!validate_arglist (exp,
4974 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4975 return NULL_RTX;
4977 tree arg1 = CALL_EXPR_ARG (exp, 0);
4978 tree arg2 = CALL_EXPR_ARG (exp, 1);
4979 tree arg3 = CALL_EXPR_ARG (exp, 2);
4981 location_t loc = EXPR_LOCATION (exp);
4982 tree len1 = c_strlen (arg1, 1);
4983 tree len2 = c_strlen (arg2, 1);
4985 /* Due to the performance benefit, always inline the calls first. */
4986 rtx result = NULL_RTX;
4987 result = inline_expand_builtin_bytecmp (exp, target);
4988 if (result)
4989 return result;
4991 /* If c_strlen can determine an expression for one of the string
4992 lengths, and it doesn't have side effects, then emit cmpstrnsi
4993 using length MIN(strlen(string)+1, arg3). */
4994 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4995 if (cmpstrn_icode == CODE_FOR_nothing)
4996 return NULL_RTX;
4998 tree len;
5000 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5001 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5003 if (len1)
5004 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5005 if (len2)
5006 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5008 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5010 /* If we don't have a constant length for the first, use the length
5011 of the second, if we know it. If neither string is constant length,
5012 use the given length argument. We don't require a constant for
5013 this case; some cost analysis could be done if both are available
5014 but neither is constant. For now, assume they're equally cheap,
5015 unless one has side effects. If both strings have constant lengths,
5016 use the smaller. */
5018 if (!len1 && !len2)
5019 len = len3;
5020 else if (!len1)
5021 len = len2;
5022 else if (!len2)
5023 len = len1;
5024 else if (TREE_SIDE_EFFECTS (len1))
5025 len = len2;
5026 else if (TREE_SIDE_EFFECTS (len2))
5027 len = len1;
5028 else if (TREE_CODE (len1) != INTEGER_CST)
5029 len = len2;
5030 else if (TREE_CODE (len2) != INTEGER_CST)
5031 len = len1;
5032 else if (tree_int_cst_lt (len1, len2))
5033 len = len1;
5034 else
5035 len = len2;
5037 /* If we are not using the given length, we must incorporate it here.
5038 The actual new length parameter will be MIN(len,arg3) in this case. */
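/* E.g. (illustrative): for strncmp (x, "hi", 5) the comparison length
   becomes MIN (3, 5) == 3, since the NUL in "hi" bounds the compare. */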
5039 if (len != len3)
5041 len = fold_convert_loc (loc, sizetype, len);
5042 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5044 rtx arg1_rtx = get_memory_rtx (arg1, len);
5045 rtx arg2_rtx = get_memory_rtx (arg2, len);
5046 rtx arg3_rtx = expand_normal (len);
5047 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5048 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5049 MIN (arg1_align, arg2_align));
5051 tree fndecl = get_callee_fndecl (exp);
5052 if (result)
5054 /* Return the value in the proper mode for this function. */
5055 mode = TYPE_MODE (TREE_TYPE (exp));
5056 if (GET_MODE (result) == mode)
5057 return result;
5058 if (target == 0)
5059 return convert_to_mode (mode, result, 0);
5060 convert_move (target, result, 0);
5061 return target;
5064 /* Expand the library call ourselves using a stabilized argument
5065 list to avoid re-evaluating the function's arguments twice. */
5066 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5067 copy_warning (call, exp);
5068 gcc_assert (TREE_CODE (call) == CALL_EXPR);
5069 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5070 return expand_call (call, target, target == const0_rtx);
5073 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5074 if that's convenient. */
5077 expand_builtin_saveregs (void)
5079 rtx val;
5080 rtx_insn *seq;
5082 /* Don't do __builtin_saveregs more than once in a function.
5083 Save the result of the first call and reuse it. */
5084 if (saveregs_value != 0)
5085 return saveregs_value;
5087 /* When this function is called, it means that registers must be
5088 saved on entry to this function. So we migrate the call to the
5089 first insn of this function. */
5091 start_sequence ();
5093 /* Do whatever the machine needs done in this case. */
5094 val = targetm.calls.expand_builtin_saveregs ();
5096 seq = get_insns ();
5097 end_sequence ();
5099 saveregs_value = val;
5101 /* Put the insns after the NOTE that starts the function. If this
5102 is inside a start_sequence, make the outer-level insn chain current, so
5103 the code is placed at the start of the function. */
5104 push_topmost_sequence ();
5105 emit_insn_after (seq, entry_of_function ());
5106 pop_topmost_sequence ();
5108 return val;
5111 /* Expand a call to __builtin_next_arg. */
5113 static rtx
5114 expand_builtin_next_arg (void)
5116 /* Checking arguments is already done in fold_builtin_next_arg
5117 that must be called before this function. */
5118 return expand_binop (ptr_mode, add_optab,
5119 crtl->args.internal_arg_pointer,
5120 crtl->args.arg_offset_rtx,
5121 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5124 /* Make it easier for the backends by protecting the valist argument
5125 from multiple evaluations. */
5127 static tree
5128 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5130 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5132 /* The current way of determining the type of valist is completely
5133 bogus. We should have the information on the va builtin instead. */
5134 if (!vatype)
5135 vatype = targetm.fn_abi_va_list (cfun->decl);
5137 if (TREE_CODE (vatype) == ARRAY_TYPE)
5139 if (TREE_SIDE_EFFECTS (valist))
5140 valist = save_expr (valist);
5142 /* For this case, the backends will be expecting a pointer to
5143 vatype, but it's possible we've actually been given an array
5144 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5145 So fix it. */
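/* E.g. (illustrative): on x86_64 (SysV), va_list is declared as an array
   of one struct, so a local "va_list ap;" reaches here as an ARRAY_TYPE
   and is turned into the pointer the backend expects by the ADDR_EXPR
   built below. */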
5146 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5148 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5149 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5152 else
5154 tree pt = build_pointer_type (vatype);
5156 if (! needs_lvalue)
5158 if (! TREE_SIDE_EFFECTS (valist))
5159 return valist;
5161 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5162 TREE_SIDE_EFFECTS (valist) = 1;
5165 if (TREE_SIDE_EFFECTS (valist))
5166 valist = save_expr (valist);
5167 valist = fold_build2_loc (loc, MEM_REF,
5168 vatype, valist, build_int_cst (pt, 0));
5171 return valist;
5174 /* The "standard" definition of va_list is void*. */
5176 tree
5177 std_build_builtin_va_list (void)
5179 return ptr_type_node;
5182 /* The "standard" abi va_list is va_list_type_node. */
5184 tree
5185 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5187 return va_list_type_node;
5190 /* The "standard" type of va_list is va_list_type_node. */
5192 tree
5193 std_canonical_va_list_type (tree type)
5195 tree wtype, htype;
5197 wtype = va_list_type_node;
5198 htype = type;
5200 if (TREE_CODE (wtype) == ARRAY_TYPE)
5202 /* If va_list is an array type, the argument may have decayed
5203 to a pointer type, e.g. by being passed to another function.
5204 In that case, unwrap both types so that we can compare the
5205 underlying records. */
5206 if (TREE_CODE (htype) == ARRAY_TYPE
5207 || POINTER_TYPE_P (htype))
5209 wtype = TREE_TYPE (wtype);
5210 htype = TREE_TYPE (htype);
5213 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5214 return va_list_type_node;
5216 return NULL_TREE;
5219 /* The "standard" implementation of va_start: just assign `nextarg' to
5220 the variable. */
5222 void
5223 std_expand_builtin_va_start (tree valist, rtx nextarg)
5225 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5226 convert_move (va_r, nextarg, 0);
5229 /* Expand EXP, a call to __builtin_va_start. */
5231 static rtx
5232 expand_builtin_va_start (tree exp)
5234 rtx nextarg;
5235 tree valist;
5236 location_t loc = EXPR_LOCATION (exp);
5238 if (call_expr_nargs (exp) < 2)
5240 error_at (loc, "too few arguments to function %<va_start%>");
5241 return const0_rtx;
5244 if (fold_builtin_next_arg (exp, true))
5245 return const0_rtx;
5247 nextarg = expand_builtin_next_arg ();
5248 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5250 if (targetm.expand_builtin_va_start)
5251 targetm.expand_builtin_va_start (valist, nextarg);
5252 else
5253 std_expand_builtin_va_start (valist, nextarg);
5255 return const0_rtx;
5258 /* Expand EXP, a call to __builtin_va_end. */
5260 static rtx
5261 expand_builtin_va_end (tree exp)
5263 tree valist = CALL_EXPR_ARG (exp, 0);
5265 /* Evaluate for side effects, if needed. I hate macros that don't
5266 do that. */
5267 if (TREE_SIDE_EFFECTS (valist))
5268 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5270 return const0_rtx;
5273 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5274 builtin rather than just as an assignment in stdarg.h because of the
5275 nastiness of array-type va_list types. */
5277 static rtx
5278 expand_builtin_va_copy (tree exp)
5280 tree dst, src, t;
5281 location_t loc = EXPR_LOCATION (exp);
5283 dst = CALL_EXPR_ARG (exp, 0);
5284 src = CALL_EXPR_ARG (exp, 1);
5286 dst = stabilize_va_list_loc (loc, dst, 1);
5287 src = stabilize_va_list_loc (loc, src, 0);
5289 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5291 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5293 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5294 TREE_SIDE_EFFECTS (t) = 1;
5295 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5297 else
5299 rtx dstb, srcb, size;
5301 /* Evaluate to pointers. */
5302 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5303 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5304 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5305 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5307 dstb = convert_memory_address (Pmode, dstb);
5308 srcb = convert_memory_address (Pmode, srcb);
5310 /* "Dereference" to BLKmode memories. */
5311 dstb = gen_rtx_MEM (BLKmode, dstb);
5312 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5313 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5314 srcb = gen_rtx_MEM (BLKmode, srcb);
5315 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5316 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5318 /* Copy. */
5319 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5322 return const0_rtx;
5325 /* Expand a call to one of the builtin functions __builtin_frame_address or
5326 __builtin_return_address. */
5328 static rtx
5329 expand_builtin_frame_address (tree fndecl, tree exp)
5331 /* The argument must be a nonnegative integer constant.
5332 It counts the number of frames to scan up the stack.
5333 The value is either the frame pointer value or the return
5334 address saved in that frame. */
5335 if (call_expr_nargs (exp) == 0)
5336 /* Warning about missing arg was already issued. */
5337 return const0_rtx;
5338 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5340 error ("invalid argument to %qD", fndecl);
5341 return const0_rtx;
5343 else
5345 /* Number of frames to scan up the stack. */
5346 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5348 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5350 /* Some ports cannot access arbitrary stack frames. */
5351 if (tem == NULL)
5353 warning (0, "unsupported argument to %qD", fndecl);
5354 return const0_rtx;
5357 if (count)
5359 /* Warn since no effort is made to ensure that any frame
5360 beyond the current one exists or can be safely reached. */
5361 warning (OPT_Wframe_address, "calling %qD with "
5362 "a nonzero argument is unsafe", fndecl);
5365 /* For __builtin_frame_address, return what we've got. */
5366 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5367 return tem;
5369 if (!REG_P (tem)
5370 && ! CONSTANT_P (tem))
5371 tem = copy_addr_to_reg (tem);
5372 return tem;
5376 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5377 failed and the caller should emit a normal call. */
5379 static rtx
5380 expand_builtin_alloca (tree exp)
5382 rtx op0;
5383 rtx result;
5384 unsigned int align;
5385 tree fndecl = get_callee_fndecl (exp);
5386 HOST_WIDE_INT max_size;
5387 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5388 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5389 bool valid_arglist
5390 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5391 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5392 VOID_TYPE)
5393 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5394 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5395 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5397 if (!valid_arglist)
5398 return NULL_RTX;
5400 /* Compute the argument. */
5401 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5403 /* Compute the alignment. */
5404 align = (fcode == BUILT_IN_ALLOCA
5405 ? BIGGEST_ALIGNMENT
5406 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5408 /* Compute the maximum size. */
5409 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5410 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5411 : -1);
5413 /* Allocate the desired space. If the allocation stems from the declaration
5414 of a variable-sized object, it cannot accumulate. */
5415 result
5416 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5417 result = convert_memory_address (ptr_mode, result);
5419 /* Dynamic allocations for variables are recorded during gimplification. */
5420 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5421 record_dynamic_alloc (exp);
5423 return result;
5426 /* Expand EXP, a call to __asan_allocas_unpoison.  Add to the second
5427 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which
5428 is the STACK_DYNAMIC_OFFSET value.  See the motivation for this in the
5429 comment on the handle_builtin_stack_restore function. */
5431 static rtx
5432 expand_asan_emit_allocas_unpoison (tree exp)
5434 tree arg0 = CALL_EXPR_ARG (exp, 0);
5435 tree arg1 = CALL_EXPR_ARG (exp, 1);
5436 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5437 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5438 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5439 stack_pointer_rtx, NULL_RTX, 0,
5440 OPTAB_LIB_WIDEN);
5441 off = convert_modes (ptr_mode, Pmode, off, 0);
5442 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5443 OPTAB_LIB_WIDEN);
5444 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5445 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5446 top, ptr_mode, bot, ptr_mode);
5447 return ret;
5450 /* Expand a call to bswap builtin in EXP.
5451 Return NULL_RTX if a normal call should be emitted rather than expanding the
5452 function in-line. If convenient, the result should be placed in TARGET.
5453 SUBTARGET may be used as the target for computing one of EXP's operands. */
5455 static rtx
5456 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5457 rtx subtarget)
5459 tree arg;
5460 rtx op0;
5462 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5463 return NULL_RTX;
5465 arg = CALL_EXPR_ARG (exp, 0);
5466 op0 = expand_expr (arg,
5467 subtarget && GET_MODE (subtarget) == target_mode
5468 ? subtarget : NULL_RTX,
5469 target_mode, EXPAND_NORMAL);
5470 if (GET_MODE (op0) != target_mode)
5471 op0 = convert_to_mode (target_mode, op0, 1);
5473 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5475 gcc_assert (target);
5477 return convert_to_mode (target_mode, target, 1);
5480 /* Expand a call to a unary builtin in EXP.
5481 Return NULL_RTX if a normal call should be emitted rather than expanding the
5482 function in-line. If convenient, the result should be placed in TARGET.
5483 SUBTARGET may be used as the target for computing one of EXP's operands. */
5485 static rtx
5486 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5487 rtx subtarget, optab op_optab)
5489 rtx op0;
5491 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5492 return NULL_RTX;
5494 /* Compute the argument. */
5495 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5496 (subtarget
5497 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5498 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5499 VOIDmode, EXPAND_NORMAL);
5500 /* Compute op, into TARGET if possible.
5501 Set TARGET to wherever the result comes back. */
5502 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5503 op_optab, op0, target, op_optab != clrsb_optab);
5504 gcc_assert (target);
5506 return convert_to_mode (target_mode, target, 0);
5509 /* Expand a call to __builtin_expect.  We just return our argument,
5510 as the builtin_expect semantics should already have been handled by
5511 the tree branch prediction pass. */
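/* Purely for illustration (an assumed example, not code from this file):
   a typical source-level use is

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();

   By the time we get here the hint has already been consumed by the
   branch prediction pass, so only the first argument is forwarded.  */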
5513 static rtx
5514 expand_builtin_expect (tree exp, rtx target)
5516 tree arg;
5518 if (call_expr_nargs (exp) < 2)
5519 return const0_rtx;
5520 arg = CALL_EXPR_ARG (exp, 0);
5522 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5523 /* When guessing was done, the hints should be already stripped away. */
5524 gcc_assert (!flag_guess_branch_prob
5525 || optimize == 0 || seen_error ());
5526 return target;
5529 /* Expand a call to __builtin_expect_with_probability.  We just return our
5530 argument, as the builtin_expect semantics should already have been handled
5531 by the tree branch prediction pass. */
5533 static rtx
5534 expand_builtin_expect_with_probability (tree exp, rtx target)
5536 tree arg;
5538 if (call_expr_nargs (exp) < 3)
5539 return const0_rtx;
5540 arg = CALL_EXPR_ARG (exp, 0);
5542 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5543 /* When guessing was done, the hints should be already stripped away. */
5544 gcc_assert (!flag_guess_branch_prob
5545 || optimize == 0 || seen_error ());
5546 return target;
5550 /* Expand a call to __builtin_assume_aligned.  We just return our first
5551 argument, as the builtin_assume_aligned semantics should already have
5552 been handled by CCP. */
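/* Illustrative use (an assumed example, not code from this file):

     void *p = __builtin_assume_aligned (buf, 32);

   CCP records the 32-byte alignment on the resulting pointer; by this
   point only the pass-through of the first argument remains.  */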
5554 static rtx
5555 expand_builtin_assume_aligned (tree exp, rtx target)
5557 if (call_expr_nargs (exp) < 2)
5558 return const0_rtx;
5559 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5560 EXPAND_NORMAL);
5561 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5562 && (call_expr_nargs (exp) < 3
5563 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5564 return target;
5567 void
5568 expand_builtin_trap (void)
5570 if (targetm.have_trap ())
5572 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5573 /* For trap insns when not accumulating outgoing args force
5574 REG_ARGS_SIZE note to prevent crossjumping of calls with
5575 different args sizes. */
5576 if (!ACCUMULATE_OUTGOING_ARGS)
5577 add_args_size_note (insn, stack_pointer_delta);
5579 else
5581 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5582 tree call_expr = build_call_expr (fn, 0);
5583 expand_call (call_expr, NULL_RTX, false);
5586 emit_barrier ();
5589 /* Expand a call to __builtin_unreachable. We do nothing except emit
5590 a barrier saying that control flow will not pass here.
5592 It is the responsibility of the program being compiled to ensure
5593 that control flow never reaches __builtin_unreachable. */
5594 static void
5595 expand_builtin_unreachable (void)
5597 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5598 to avoid this. */
5599 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5600 emit_barrier ();
5603 /* Expand EXP, a call to fabs, fabsf or fabsl.
5604 Return NULL_RTX if a normal call should be emitted rather than expanding
5605 the function inline. If convenient, the result should be placed
5606 in TARGET. SUBTARGET may be used as the target for computing
5607 the operand. */
5609 static rtx
5610 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5612 machine_mode mode;
5613 tree arg;
5614 rtx op0;
5616 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5617 return NULL_RTX;
5619 arg = CALL_EXPR_ARG (exp, 0);
5620 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5621 mode = TYPE_MODE (TREE_TYPE (arg));
5622 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5623 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5626 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5627 Return NULL if a normal call should be emitted rather than expanding the
5628 function inline. If convenient, the result should be placed in TARGET.
5629 SUBTARGET may be used as the target for computing the operand. */
5631 static rtx
5632 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5634 rtx op0, op1;
5635 tree arg;
5637 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5638 return NULL_RTX;
5640 arg = CALL_EXPR_ARG (exp, 0);
5641 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5643 arg = CALL_EXPR_ARG (exp, 1);
5644 op1 = expand_normal (arg);
5646 return expand_copysign (op0, op1, target);
5649 /* Emit a call to __builtin___clear_cache. */
5651 void
5652 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5654 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5655 BUILTIN_ASM_NAME_PTR
5656 (BUILT_IN_CLEAR_CACHE));
5658 emit_library_call (callee,
5659 LCT_NORMAL, VOIDmode,
5660 convert_memory_address (ptr_mode, begin), ptr_mode,
5661 convert_memory_address (ptr_mode, end), ptr_mode);
5664 /* Emit a call to __builtin___clear_cache, unless the target specifies
5665 it as do-nothing. This function can be used by trampoline
5666 finalizers to duplicate the effects of expanding a call to the
5667 clear_cache builtin. */
5669 void
5670 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5672 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5673 || CONST_INT_P (begin))
5674 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5675 || CONST_INT_P (end)));
5677 if (targetm.have_clear_cache ())
5679 /* We have a "clear_cache" insn, and it will handle everything. */
5680 class expand_operand ops[2];
5682 create_address_operand (&ops[0], begin);
5683 create_address_operand (&ops[1], end);
5685 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5686 return;
5688 else
5690 #ifndef CLEAR_INSN_CACHE
5691 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5692 does nothing. There is no need to call it. Do nothing. */
5693 return;
5694 #endif /* CLEAR_INSN_CACHE */
5697 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5700 /* Expand a call to __builtin___clear_cache. */
5702 static void
5703 expand_builtin___clear_cache (tree exp)
5705 tree begin, end;
5706 rtx begin_rtx, end_rtx;
5708 /* We must not expand to a library call. If we did, any
5709 fallback library function in libgcc that might contain a call to
5710 __builtin___clear_cache() would recurse infinitely. */
5711 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5713 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5714 return;
5717 begin = CALL_EXPR_ARG (exp, 0);
5718 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5720 end = CALL_EXPR_ARG (exp, 1);
5721 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5723 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5726 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5728 static rtx
5729 round_trampoline_addr (rtx tramp)
5731 rtx temp, addend, mask;
5733 /* If we don't need too much alignment, we'll have been guaranteed
5734 proper alignment by get_trampoline_type. */
5735 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5736 return tramp;
5738 /* Round address up to desired boundary. */
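  /* Sketch of the computation below (assuming, say, a TRAMPOLINE_ALIGNMENT
     of 128 bits, i.e. 16 bytes): tramp = (tramp + 15) & -16, done with two
     expand_simple_binop calls so the target can widen as needed.  */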
5739 temp = gen_reg_rtx (Pmode);
5740 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5741 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5743 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5744 temp, 0, OPTAB_LIB_WIDEN);
5745 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5746 temp, 0, OPTAB_LIB_WIDEN);
5748 return tramp;
5751 static rtx
5752 expand_builtin_init_trampoline (tree exp, bool onstack)
5754 tree t_tramp, t_func, t_chain;
5755 rtx m_tramp, r_tramp, r_chain, tmp;
5757 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5758 POINTER_TYPE, VOID_TYPE))
5759 return NULL_RTX;
5761 t_tramp = CALL_EXPR_ARG (exp, 0);
5762 t_func = CALL_EXPR_ARG (exp, 1);
5763 t_chain = CALL_EXPR_ARG (exp, 2);
5765 r_tramp = expand_normal (t_tramp);
5766 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5767 MEM_NOTRAP_P (m_tramp) = 1;
5769 /* If ONSTACK, the TRAMP argument should be the address of a field
5770 within the local function's FRAME decl. Either way, let's see if
5771 we can fill in the MEM_ATTRs for this memory. */
5772 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5773 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5775 /* Creator of a heap trampoline is responsible for making sure the
5776 address is aligned to at least STACK_BOUNDARY. Normally malloc
5777 will ensure this anyhow. */
5778 tmp = round_trampoline_addr (r_tramp);
5779 if (tmp != r_tramp)
5781 m_tramp = change_address (m_tramp, BLKmode, tmp);
5782 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5783 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5786 /* The FUNC argument should be the address of the nested function.
5787 Extract the actual function decl to pass to the hook. */
5788 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5789 t_func = TREE_OPERAND (t_func, 0);
5790 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5792 r_chain = expand_normal (t_chain);
5794 /* Generate insns to initialize the trampoline. */
5795 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5797 if (onstack)
5799 trampolines_created = 1;
5801 if (targetm.calls.custom_function_descriptors != 0)
5802 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5803 "trampoline generated for nested function %qD", t_func);
5806 return const0_rtx;
5809 static rtx
5810 expand_builtin_adjust_trampoline (tree exp)
5812 rtx tramp;
5814 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5815 return NULL_RTX;
5817 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5818 tramp = round_trampoline_addr (tramp);
5819 if (targetm.calls.trampoline_adjust_address)
5820 tramp = targetm.calls.trampoline_adjust_address (tramp);
5822 return tramp;
5825 /* Expand a call to the builtin descriptor initialization routine.
5826 A descriptor is made up of a pair of pointers: to the static
5827 chain and to the code entry, in this order. */
5829 static rtx
5830 expand_builtin_init_descriptor (tree exp)
5832 tree t_descr, t_func, t_chain;
5833 rtx m_descr, r_descr, r_func, r_chain;
5835 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5836 VOID_TYPE))
5837 return NULL_RTX;
5839 t_descr = CALL_EXPR_ARG (exp, 0);
5840 t_func = CALL_EXPR_ARG (exp, 1);
5841 t_chain = CALL_EXPR_ARG (exp, 2);
5843 r_descr = expand_normal (t_descr);
5844 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5845 MEM_NOTRAP_P (m_descr) = 1;
5846 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5848 r_func = expand_normal (t_func);
5849 r_chain = expand_normal (t_chain);
5851 /* Generate insns to initialize the descriptor. */
5852 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5853 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5854 POINTER_SIZE / BITS_PER_UNIT), r_func);
5856 return const0_rtx;
5859 /* Expand a call to the builtin descriptor adjustment routine. */
5861 static rtx
5862 expand_builtin_adjust_descriptor (tree exp)
5864 rtx tramp;
5866 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5867 return NULL_RTX;
5869 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5871 /* Unalign the descriptor to allow runtime identification. */
5872 tramp = plus_constant (ptr_mode, tramp,
5873 targetm.calls.custom_function_descriptors);
5875 return force_operand (tramp, NULL_RTX);
5878 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5879 function. The function first checks whether the back end provides
5880 an insn to implement signbit for the respective mode. If not, it
5881 checks whether the floating point format of the value is such that
5882 the sign bit can be extracted. If that is not the case, error out.
5883 EXP is the expression that is a call to the builtin function; if
5884 convenient, the result should be placed in TARGET. */
5885 static rtx
5886 expand_builtin_signbit (tree exp, rtx target)
5888 const struct real_format *fmt;
5889 scalar_float_mode fmode;
5890 scalar_int_mode rmode, imode;
5891 tree arg;
5892 int word, bitpos;
5893 enum insn_code icode;
5894 rtx temp;
5895 location_t loc = EXPR_LOCATION (exp);
5897 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5898 return NULL_RTX;
5900 arg = CALL_EXPR_ARG (exp, 0);
5901 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5902 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5903 fmt = REAL_MODE_FORMAT (fmode);
5905 arg = builtin_save_expr (arg);
5907 /* Expand the argument yielding a RTX expression. */
5908 temp = expand_normal (arg);
5910 /* Check if the back end provides an insn that handles signbit for the
5911 argument's mode. */
5912 icode = optab_handler (signbit_optab, fmode);
5913 if (icode != CODE_FOR_nothing)
5915 rtx_insn *last = get_last_insn ();
5916 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5917 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
5918 return this_target;
5919 delete_insns_since (last);
5922 /* For floating point formats without a sign bit, implement signbit
5923 as "ARG < 0.0". */
5924 bitpos = fmt->signbit_ro;
5925 if (bitpos < 0)
5927 /* But we can't do this if the format supports signed zero. */
5928 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5930 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5931 build_real (TREE_TYPE (arg), dconst0));
5932 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5935 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5937 imode = int_mode_for_mode (fmode).require ();
5938 temp = gen_lowpart (imode, temp);
5940 else
5942 imode = word_mode;
5943 /* Handle targets with different FP word orders. */
5944 if (FLOAT_WORDS_BIG_ENDIAN)
5945 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5946 else
5947 word = bitpos / BITS_PER_WORD;
5948 temp = operand_subword_force (temp, word, fmode);
5949 bitpos = bitpos % BITS_PER_WORD;
5952 /* Force the intermediate word_mode (or narrower) result into a
5953 register. This avoids attempting to create paradoxical SUBREGs
5954 of floating point modes below. */
5955 temp = force_reg (imode, temp);
5957 /* If the bitpos is within the "result mode" lowpart, the operation
5958 can be implemented with a single bitwise AND. Otherwise, we need
5959 a right shift and an AND. */
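  /* Worked example (assuming a typical 64-bit target with 32-bit int):
     for float the sign bit is bit 31 and rmode is SImode, so a single AND
     with 0x80000000 suffices; for double the sign bit is bit 63, so we
     first shift right by 63 and then AND with 1 in SImode.  */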
5961 if (bitpos < GET_MODE_BITSIZE (rmode))
5963 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5965 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5966 temp = gen_lowpart (rmode, temp);
5967 temp = expand_binop (rmode, and_optab, temp,
5968 immed_wide_int_const (mask, rmode),
5969 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5971 else
5973 /* Perform a logical right shift to place the signbit in the least
5974 significant bit, then truncate the result to the desired mode
5975 and mask just this bit. */
5976 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5977 temp = gen_lowpart (rmode, temp);
5978 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5979 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5982 return temp;
5985 /* Expand fork or exec calls. TARGET is the desired target of the
5986 call. EXP is the call. FN is the
5987 identifier of the actual function. IGNORE is nonzero if the
5988 value is to be ignored. */
5990 static rtx
5991 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5993 tree id, decl;
5994 tree call;
5996 /* If we are not profiling, just call the function. */
5997 if (!profile_arc_flag)
5998 return NULL_RTX;
6000 /* Otherwise call the wrapper. This should be equivalent for the rest of
6001 the compiler, so the code does not diverge, and the wrapper may run the
6002 code necessary for keeping the profiling sane. */
6004 switch (DECL_FUNCTION_CODE (fn))
6006 case BUILT_IN_FORK:
6007 id = get_identifier ("__gcov_fork");
6008 break;
6010 case BUILT_IN_EXECL:
6011 id = get_identifier ("__gcov_execl");
6012 break;
6014 case BUILT_IN_EXECV:
6015 id = get_identifier ("__gcov_execv");
6016 break;
6018 case BUILT_IN_EXECLP:
6019 id = get_identifier ("__gcov_execlp");
6020 break;
6022 case BUILT_IN_EXECLE:
6023 id = get_identifier ("__gcov_execle");
6024 break;
6026 case BUILT_IN_EXECVP:
6027 id = get_identifier ("__gcov_execvp");
6028 break;
6030 case BUILT_IN_EXECVE:
6031 id = get_identifier ("__gcov_execve");
6032 break;
6034 default:
6035 gcc_unreachable ();
6038 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6039 FUNCTION_DECL, id, TREE_TYPE (fn));
6040 DECL_EXTERNAL (decl) = 1;
6041 TREE_PUBLIC (decl) = 1;
6042 DECL_ARTIFICIAL (decl) = 1;
6043 TREE_NOTHROW (decl) = 1;
6044 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6045 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6046 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6047 return expand_call (call, target, ignore);
6052 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6053 the pointer in these functions is void*, the tree optimizers may remove
6054 casts. The mode computed in expand_builtin isn't reliable either, due
6055 to __sync_bool_compare_and_swap.
6057 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6058 group of builtins. This gives us log2 of the mode size. */
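/* Worked example (illustrative): for BUILT_IN_SYNC_FETCH_AND_ADD_4 the
   difference from BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2, so the access is
   BITS_PER_UNIT << 2 == 32 bits wide, i.e. SImode on typical targets.  */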
6060 static inline machine_mode
6061 get_builtin_sync_mode (int fcode_diff)
6063 /* The size is not negotiable, so ask not to get BLKmode in return
6064 if the target indicates that a smaller size would be better. */
6065 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6068 /* Expand the memory expression LOC and return the appropriate memory operand
6069 for the builtin_sync operations. */
6071 static rtx
6072 get_builtin_sync_mem (tree loc, machine_mode mode)
6074 rtx addr, mem;
6075 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6076 ? TREE_TYPE (TREE_TYPE (loc))
6077 : TREE_TYPE (loc));
6078 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6080 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6081 addr = convert_memory_address (addr_mode, addr);
6083 /* Note that we explicitly do not want any alias information for this
6084 memory, so that we kill all other live memories. Otherwise we don't
6085 satisfy the full barrier semantics of the intrinsic. */
6086 mem = gen_rtx_MEM (mode, addr);
6088 set_mem_addr_space (mem, addr_space);
6090 mem = validize_mem (mem);
6092 /* The alignment needs to be at least that of the mode. */
6093 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6094 get_pointer_alignment (loc)));
6095 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6096 MEM_VOLATILE_P (mem) = 1;
6098 return mem;
6101 /* Make sure an argument is in the right mode.
6102 EXP is the tree argument.
6103 MODE is the mode it should be in. */
6105 static rtx
6106 expand_expr_force_mode (tree exp, machine_mode mode)
6108 rtx val;
6109 machine_mode old_mode;
6111 if (TREE_CODE (exp) == SSA_NAME
6112 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6114 /* Undo argument promotion if possible, as combine might not
6115 be able to do it later due to MEM_VOLATILE_P uses in the
6116 patterns. */
6117 gimple *g = get_gimple_for_ssa_name (exp);
6118 if (g && gimple_assign_cast_p (g))
6120 tree rhs = gimple_assign_rhs1 (g);
6121 tree_code code = gimple_assign_rhs_code (g);
6122 if (CONVERT_EXPR_CODE_P (code)
6123 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6124 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6125 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6126 && (TYPE_PRECISION (TREE_TYPE (exp))
6127 > TYPE_PRECISION (TREE_TYPE (rhs))))
6128 exp = rhs;
6132 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6133 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6134 of CONST_INTs, where we know the old_mode only from the call argument. */
6136 old_mode = GET_MODE (val);
6137 if (old_mode == VOIDmode)
6138 old_mode = TYPE_MODE (TREE_TYPE (exp));
6139 val = convert_modes (mode, old_mode, val, 1);
6140 return val;
6144 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6145 EXP is the CALL_EXPR. CODE is the rtx code
6146 that corresponds to the arithmetic or logical operation from the name;
6147 an exception here is that NOT actually means NAND. TARGET is an optional
6148 place for us to store the results; AFTER is true if this is the
6149 fetch_and_xxx form. */
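/* As a reminder of the NAND quirk (illustration only): since GCC 4.4

     __sync_fetch_and_nand (ptr, val)

   behaves as { tmp = *ptr; *ptr = ~(tmp & val); return tmp; }, which is
   why the code below warns about the changed semantics.  */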
6151 static rtx
6152 expand_builtin_sync_operation (machine_mode mode, tree exp,
6153 enum rtx_code code, bool after,
6154 rtx target)
6156 rtx val, mem;
6157 location_t loc = EXPR_LOCATION (exp);
6159 if (code == NOT && warn_sync_nand)
6161 tree fndecl = get_callee_fndecl (exp);
6162 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6164 static bool warned_f_a_n, warned_n_a_f;
6166 switch (fcode)
6168 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6169 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6170 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6171 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6172 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6173 if (warned_f_a_n)
6174 break;
6176 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6177 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6178 warned_f_a_n = true;
6179 break;
6181 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6182 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6183 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6184 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6185 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6186 if (warned_n_a_f)
6187 break;
6189 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6190 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6191 warned_n_a_f = true;
6192 break;
6194 default:
6195 gcc_unreachable ();
6199 /* Expand the operands. */
6200 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6201 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6203 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6204 after);
6207 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6208 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6209 true if this is the boolean form. TARGET is a place for us to store the
6210 results; this is NOT optional if IS_BOOL is true. */
6212 static rtx
6213 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6214 bool is_bool, rtx target)
6216 rtx old_val, new_val, mem;
6217 rtx *pbool, *poval;
6219 /* Expand the operands. */
6220 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6221 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6222 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6224 pbool = poval = NULL;
6225 if (target != const0_rtx)
6227 if (is_bool)
6228 pbool = &target;
6229 else
6230 poval = &target;
6232 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6233 false, MEMMODEL_SYNC_SEQ_CST,
6234 MEMMODEL_SYNC_SEQ_CST))
6235 return NULL_RTX;
6237 return target;
6240 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6241 general form is actually an atomic exchange, and some targets only
6242 support a reduced form with the second argument being a constant 1.
6243 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6244 the results. */
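/* Illustrative use (an assumed example, not code from this file): a
   minimal spinlock built on this primitive is

     while (__sync_lock_test_and_set (&lock, 1))
       ;
     ... critical section ...
     __sync_lock_release (&lock);

   The loop spins while the previous value was nonzero; targets with only
   the reduced form still handle this, since the stored value is the
   constant 1.  */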
6246 static rtx
6247 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6248 rtx target)
6250 rtx val, mem;
6252 /* Expand the operands. */
6253 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6254 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6256 return expand_sync_lock_test_and_set (target, mem, val);
6259 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6261 static void
6262 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6264 rtx mem;
6266 /* Expand the operands. */
6267 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6269 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6272 /* Given an integer representing an ``enum memmodel'', verify its
6273 correctness and return the memory model enum. */
6275 static enum memmodel
6276 get_memmodel (tree exp)
6278 /* If the parameter is not a constant, it's a run time value so we'll just
6279 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6280 if (TREE_CODE (exp) != INTEGER_CST)
6281 return MEMMODEL_SEQ_CST;
6283 rtx op = expand_normal (exp);
6285 unsigned HOST_WIDE_INT val = INTVAL (op);
6286 if (targetm.memmodel_check)
6287 val = targetm.memmodel_check (val);
6288 else if (val & ~MEMMODEL_MASK)
6289 return MEMMODEL_SEQ_CST;
6291 /* Should never see a user-specified SYNC memory model, so >= LAST works. */
6292 if (memmodel_base (val) >= MEMMODEL_LAST)
6293 return MEMMODEL_SEQ_CST;
6295 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6296 be conservative and promote consume to acquire. */
6297 if (val == MEMMODEL_CONSUME)
6298 val = MEMMODEL_ACQUIRE;
6300 return (enum memmodel) val;
6303 /* Expand the __atomic_exchange intrinsic:
6304 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6305 EXP is the CALL_EXPR.
6306 TARGET is an optional place for us to store the results. */
6308 static rtx
6309 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6311 rtx val, mem;
6312 enum memmodel model;
6314 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6316 if (!flag_inline_atomics)
6317 return NULL_RTX;
6319 /* Expand the operands. */
6320 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6321 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6323 return expand_atomic_exchange (target, mem, val, model);
6326 /* Expand the __atomic_compare_exchange intrinsic:
6327 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6328 TYPE desired, BOOL weak,
6329 enum memmodel success,
6330 enum memmodel failure)
6331 EXP is the CALL_EXPR.
6332 TARGET is an optional place for us to store the results. */
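/* Sketch of the semantics being implemented (illustration only):
   atomically perform

     if (*object == *expect)
       { *object = desired;  return true; }
     else
       { *expect = *object;  return false; }

   The conditional store back into EXPECT is what the compare-and-jump
   sequence at the end of this function provides.  */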
6334 static rtx
6335 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6336 rtx target)
6338 rtx expect, desired, mem, oldval;
6339 rtx_code_label *label;
6340 tree weak;
6341 bool is_weak;
6343 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6344 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6346 if (failure > success)
6347 success = MEMMODEL_SEQ_CST;
6349 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6351 failure = MEMMODEL_SEQ_CST;
6352 success = MEMMODEL_SEQ_CST;
6356 if (!flag_inline_atomics)
6357 return NULL_RTX;
6359 /* Expand the operands. */
6360 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6362 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6363 expect = convert_memory_address (Pmode, expect);
6364 expect = gen_rtx_MEM (mode, expect);
6365 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6367 weak = CALL_EXPR_ARG (exp, 3);
6368 is_weak = false;
6369 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6370 is_weak = true;
6372 if (target == const0_rtx)
6373 target = NULL;
6375 /* Lest the rtl backend create a race condition with an improper store
6376 to memory, always create a new pseudo for OLDVAL. */
6377 oldval = NULL;
6379 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6380 is_weak, success, failure))
6381 return NULL_RTX;
6383 /* Conditionally store back to EXPECT, lest we create a race condition
6384 with an improper store to memory. */
6385 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6386 the normal case where EXPECT is totally private, i.e. a register. At
6387 which point the store can be unconditional. */
6388 label = gen_label_rtx ();
6389 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6390 GET_MODE (target), 1, label);
6391 emit_move_insn (expect, oldval);
6392 emit_label (label);
6394 return target;
6397 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6398 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6399 call. The weak parameter must be dropped to match the expected parameter
6400 list and the expected argument changed from value to pointer to memory
6401 slot. */
6403 static void
6404 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6406 unsigned int z;
6407 vec<tree, va_gc> *vec;
6409 vec_alloc (vec, 5);
6410 vec->quick_push (gimple_call_arg (call, 0));
6411 tree expected = gimple_call_arg (call, 1);
6412 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6413 TREE_TYPE (expected));
6414 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6415 if (expd != x)
6416 emit_move_insn (x, expd);
6417 tree v = make_tree (TREE_TYPE (expected), x);
6418 vec->quick_push (build1 (ADDR_EXPR,
6419 build_pointer_type (TREE_TYPE (expected)), v));
6420 vec->quick_push (gimple_call_arg (call, 2));
6421 /* Skip the boolean weak parameter. */
6422 for (z = 4; z < 6; z++)
6423 vec->quick_push (gimple_call_arg (call, z));
6424 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6425 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6426 gcc_assert (bytes_log2 < 5);
6427 built_in_function fncode
6428 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6429 + bytes_log2);
6430 tree fndecl = builtin_decl_explicit (fncode);
6431 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6432 fndecl);
6433 tree exp = build_call_vec (boolean_type_node, fn, vec);
6434 tree lhs = gimple_call_lhs (call);
6435 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6436 if (lhs)
6438 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6439 if (GET_MODE (boolret) != mode)
6440 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6441 x = force_reg (mode, x);
6442 write_complex_part (target, boolret, true, true);
6443 write_complex_part (target, x, false, false);
6447 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6449 void
6450 expand_ifn_atomic_compare_exchange (gcall *call)
6452 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6453 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6454 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6456 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6457 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6459 if (failure > success)
6460 success = MEMMODEL_SEQ_CST;
6462 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6464 failure = MEMMODEL_SEQ_CST;
6465 success = MEMMODEL_SEQ_CST;
6468 if (!flag_inline_atomics)
6470 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6471 return;
6474 /* Expand the operands. */
6475 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6477 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6478 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6480 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6482 rtx boolret = NULL;
6483 rtx oldval = NULL;
6485 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6486 is_weak, success, failure))
6488 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6489 return;
6492 tree lhs = gimple_call_lhs (call);
6493 if (lhs)
6495 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6496 if (GET_MODE (boolret) != mode)
6497 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6498 write_complex_part (target, boolret, true, true);
6499 write_complex_part (target, oldval, false, false);
6503 /* Expand the __atomic_load intrinsic:
6504 TYPE __atomic_load (TYPE *object, enum memmodel)
6505 EXP is the CALL_EXPR.
6506 TARGET is an optional place for us to store the results. */
6508 static rtx
6509 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6511 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6512 if (is_mm_release (model) || is_mm_acq_rel (model))
6513 model = MEMMODEL_SEQ_CST;
6515 if (!flag_inline_atomics)
6516 return NULL_RTX;
6518 /* Expand the operand. */
6519 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6521 return expand_atomic_load (target, mem, model);
6525 /* Expand the __atomic_store intrinsic:
6526 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6527 EXP is the CALL_EXPR.
6528 TARGET is an optional place for us to store the results. */
6530 static rtx
6531 expand_builtin_atomic_store (machine_mode mode, tree exp)
6533 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6534 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6535 || is_mm_release (model)))
6536 model = MEMMODEL_SEQ_CST;
6538 if (!flag_inline_atomics)
6539 return NULL_RTX;
6541 /* Expand the operands. */
6542 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6543 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6545 return expand_atomic_store (mem, val, model, false);
6548 /* Expand the __atomic_fetch_XXX intrinsic:
6549 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6550 EXP is the CALL_EXPR.
6551 TARGET is an optional place for us to store the results.
6552 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6553 FETCH_AFTER is true if returning the result of the operation.
6554 FETCH_AFTER is false if returning the value before the operation.
6555 IGNORE is true if the result is not used.
6556 EXT_CALL is the correct builtin for an external call if this cannot be
6557 resolved to an instruction sequence. */
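/* Concretely (illustrative mapping, assuming the usual builtin naming):
   __atomic_fetch_add expands with FETCH_AFTER false and returns the value
   held before the addition, while __atomic_add_fetch expands with
   FETCH_AFTER true and returns the updated value.  The arithmetic
   correction at the end of this function reconstructs the "after" value
   when only a library "fetch" routine is available.  */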
6559 static rtx
6560 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6561 enum rtx_code code, bool fetch_after,
6562 bool ignore, enum built_in_function ext_call)
6564 rtx val, mem, ret;
6565 enum memmodel model;
6566 tree fndecl;
6567 tree addr;
6569 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6571 /* Expand the operands. */
6572 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6573 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6575 /* Only try generating instructions if inlining is turned on. */
6576 if (flag_inline_atomics)
6578 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6579 if (ret)
6580 return ret;
6583 /* Return if a different routine isn't needed for the library call. */
6584 if (ext_call == BUILT_IN_NONE)
6585 return NULL_RTX;
6587 /* Change the call to the specified function. */
6588 fndecl = get_callee_fndecl (exp);
6589 addr = CALL_EXPR_FN (exp);
6590 STRIP_NOPS (addr);
6592 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6593 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6595 /* If we will emit code after the call, the call cannot be a tail call.
6596 If it is emitted as a tail call, a barrier is emitted after it, and
6597 then all trailing code is removed. */
6598 if (!ignore)
6599 CALL_EXPR_TAILCALL (exp) = 0;
6601 /* Expand the call here so we can emit trailing code. */
6602 ret = expand_call (exp, target, ignore);
6604 /* Replace the original function just in case it matters. */
6605 TREE_OPERAND (addr, 0) = fndecl;
6607 /* Then issue the arithmetic correction to return the right result. */
6608 if (!ignore)
6610 if (code == NOT)
6612 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6613 OPTAB_LIB_WIDEN);
6614 ret = expand_simple_unop (mode, NOT, ret, target, true);
6616 else
6617 ret = expand_simple_binop (mode, code, ret, val, target, true,
6618 OPTAB_LIB_WIDEN);
6620 return ret;
6623 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6625 void
6626 expand_ifn_atomic_bit_test_and (gcall *call)
6628 tree ptr = gimple_call_arg (call, 0);
6629 tree bit = gimple_call_arg (call, 1);
6630 tree flag = gimple_call_arg (call, 2);
6631 tree lhs = gimple_call_lhs (call);
6632 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6633 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6634 enum rtx_code code;
6635 optab optab;
6636 class expand_operand ops[5];
6638 gcc_assert (flag_inline_atomics);
6640 if (gimple_call_num_args (call) == 5)
6641 model = get_memmodel (gimple_call_arg (call, 3));
6643 rtx mem = get_builtin_sync_mem (ptr, mode);
6644 rtx val = expand_expr_force_mode (bit, mode);
6646 switch (gimple_call_internal_fn (call))
6648 case IFN_ATOMIC_BIT_TEST_AND_SET:
6649 code = IOR;
6650 optab = atomic_bit_test_and_set_optab;
6651 break;
6652 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6653 code = XOR;
6654 optab = atomic_bit_test_and_complement_optab;
6655 break;
6656 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6657 code = AND;
6658 optab = atomic_bit_test_and_reset_optab;
6659 break;
6660 default:
6661 gcc_unreachable ();
6664 if (lhs == NULL_TREE)
6666 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6667 val, NULL_RTX, true, OPTAB_DIRECT);
6668 if (code == AND)
6669 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6670 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
6671 return;
6674 rtx target;
6675 if (lhs)
6676 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6677 else
6678 target = gen_reg_rtx (mode);
6679 enum insn_code icode = direct_optab_handler (optab, mode);
6680 gcc_assert (icode != CODE_FOR_nothing);
6681 create_output_operand (&ops[0], target, mode);
6682 create_fixed_operand (&ops[1], mem);
6683 create_convert_operand_to (&ops[2], val, mode, true);
6684 create_integer_operand (&ops[3], model);
6685 create_integer_operand (&ops[4], integer_onep (flag));
6686 if (maybe_expand_insn (icode, 5, ops))
6687 return;
6689 rtx bitval = val;
6690 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6691 val, NULL_RTX, true, OPTAB_DIRECT);
6692 rtx maskval = val;
6693 if (code == AND)
6694 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6695 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6696 code, model, false);
6697 if (!result)
6699 bool is_atomic = gimple_call_num_args (call) == 5;
6700 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6701 tree fndecl = gimple_call_addr_fndecl (tcall);
6702 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6703 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
6704 make_tree (type, val),
6705 is_atomic
6706 ? gimple_call_arg (call, 3)
6707 : integer_zero_node);
6708 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6709 mode, !lhs);
6711 if (!lhs)
6712 return;
6713 if (integer_onep (flag))
6715 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6716 NULL_RTX, true, OPTAB_DIRECT);
6717 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6718 true, OPTAB_DIRECT);
6720 else
6721 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6722 OPTAB_DIRECT);
6723 if (result != target)
6724 emit_move_insn (target, result);
6727 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6729 void
6730 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6732 tree cmp = gimple_call_arg (call, 0);
6733 tree ptr = gimple_call_arg (call, 1);
6734 tree arg = gimple_call_arg (call, 2);
6735 tree lhs = gimple_call_lhs (call);
6736 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6737 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6738 optab optab;
6739 rtx_code code;
6740 class expand_operand ops[5];
6742 gcc_assert (flag_inline_atomics);
6744 if (gimple_call_num_args (call) == 5)
6745 model = get_memmodel (gimple_call_arg (call, 3));
6747 rtx mem = get_builtin_sync_mem (ptr, mode);
6748 rtx op = expand_expr_force_mode (arg, mode);
6750 switch (gimple_call_internal_fn (call))
6752 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6753 code = PLUS;
6754 optab = atomic_add_fetch_cmp_0_optab;
6755 break;
6756 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6757 code = MINUS;
6758 optab = atomic_sub_fetch_cmp_0_optab;
6759 break;
6760 case IFN_ATOMIC_AND_FETCH_CMP_0:
6761 code = AND;
6762 optab = atomic_and_fetch_cmp_0_optab;
6763 break;
6764 case IFN_ATOMIC_OR_FETCH_CMP_0:
6765 code = IOR;
6766 optab = atomic_or_fetch_cmp_0_optab;
6767 break;
6768 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6769 code = XOR;
6770 optab = atomic_xor_fetch_cmp_0_optab;
6771 break;
6772 default:
6773 gcc_unreachable ();
6776 enum rtx_code comp = UNKNOWN;
6777 switch (tree_to_uhwi (cmp))
6779 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6780 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6781 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6782 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6783 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6784 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6785 default: gcc_unreachable ();
6788 rtx target;
6789 if (lhs == NULL_TREE)
6790 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6791 else
6792 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6793 enum insn_code icode = direct_optab_handler (optab, mode);
6794 gcc_assert (icode != CODE_FOR_nothing);
6795 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6796 create_fixed_operand (&ops[1], mem);
6797 create_convert_operand_to (&ops[2], op, mode, true);
6798 create_integer_operand (&ops[3], model);
6799 create_integer_operand (&ops[4], comp);
6800 if (maybe_expand_insn (icode, 5, ops))
6801 return;
6803 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6804 code, model, true);
6805 if (!result)
6807 bool is_atomic = gimple_call_num_args (call) == 5;
6808 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6809 tree fndecl = gimple_call_addr_fndecl (tcall);
6810 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6811 tree exp = build_call_nary (type, tcall,
6812 2 + is_atomic, ptr, arg,
6813 is_atomic
6814 ? gimple_call_arg (call, 3)
6815 : integer_zero_node);
6816 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6817 mode, !lhs);
6820 if (lhs)
6822 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6823 0, 1);
6824 if (result != target)
6825 emit_move_insn (target, result);
6829 /* Expand an atomic clear operation.
6830 void __atomic_clear (BOOL *obj, enum memmodel)
6831 EXP is the call expression. */
6833 static rtx
6834 expand_builtin_atomic_clear (tree exp)
6836 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6837 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6838 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6840 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6841 model = MEMMODEL_SEQ_CST;
6843 /* Try issuing an atomic store, allowing fallback to a __sync_lock_release
6844 pattern.  The only way this can fail is if the bool type is larger than
6845 a word size.  Unlikely, but handle it anyway for completeness.  In that
6846 case emit a plain store and assume a single-threaded model, since there
6847 is no atomic support and no barriers are required. */
6848 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6849 if (!ret)
6850 emit_move_insn (mem, const0_rtx);
6851 return const0_rtx;
6854 /* Expand an atomic test_and_set operation.
6855 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6856 EXP is the call expression. */
6858 static rtx
6859 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6861 rtx mem;
6862 enum memmodel model;
6863 machine_mode mode;
6865 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6866 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6867 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6869 return expand_atomic_test_and_set (target, mem, model);
6873 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6874 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
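/* Illustrative uses (assumed caller code, not taken from this file):
   __atomic_always_lock_free (sizeof (int), 0) is folded using the typical
   alignment for an object of that size, while passing &obj as the second
   argument additionally takes the alignment of obj's type into account.  */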
6876 static tree
6877 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6879 int size;
6880 machine_mode mode;
6881 unsigned int mode_align, type_align;
6883 if (TREE_CODE (arg0) != INTEGER_CST)
6884 return NULL_TREE;
6886 /* We need a corresponding integer mode for the access to be lock-free. */
6887 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6888 if (!int_mode_for_size (size, 0).exists (&mode))
6889 return boolean_false_node;
6891 mode_align = GET_MODE_ALIGNMENT (mode);
6893 if (TREE_CODE (arg1) == INTEGER_CST)
6895 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6897 /* Either this argument is null, or it's a fake pointer encoding
6898 the alignment of the object. */
6899 val = least_bit_hwi (val);
6900 val *= BITS_PER_UNIT;
6902 if (val == 0 || mode_align < val)
6903 type_align = mode_align;
6904 else
6905 type_align = val;
6907 else
6909 tree ttype = TREE_TYPE (arg1);
6911 /* This function is usually invoked and folded immediately by the front
6912 end before anything else has a chance to look at it. The pointer
6913 parameter at this point is usually cast to a void *, so check for that
6914 and look past the cast. */
6915 if (CONVERT_EXPR_P (arg1)
6916 && POINTER_TYPE_P (ttype)
6917 && VOID_TYPE_P (TREE_TYPE (ttype))
6918 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6919 arg1 = TREE_OPERAND (arg1, 0);
6921 ttype = TREE_TYPE (arg1);
6922 gcc_assert (POINTER_TYPE_P (ttype));
6924 /* Get the underlying type of the object. */
6925 ttype = TREE_TYPE (ttype);
6926 type_align = TYPE_ALIGN (ttype);
6929 /* If the object has smaller alignment, the lock free routines cannot
6930 be used. */
6931 if (type_align < mode_align)
6932 return boolean_false_node;
6934 /* Check if a compare_and_swap pattern exists for the mode which represents
6935 the required size. The pattern is not allowed to fail, so the existence
6936 of the pattern indicates support is present. Also require that an
6937 atomic load exists for the required size. */
6938 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6939 return boolean_true_node;
6940 else
6941 return boolean_false_node;
6944 /* Return true if the parameters to call EXP represent an object which will
6945 always generate lock free instructions. The first argument represents the
6946 size of the object, and the second parameter is a pointer to the object
6947 itself. If NULL is passed for the object, then the result is based on
6948 typical alignment for an object of the specified size. Otherwise return
6949 false. */
6951 static rtx
6952 expand_builtin_atomic_always_lock_free (tree exp)
6954 tree size;
6955 tree arg0 = CALL_EXPR_ARG (exp, 0);
6956 tree arg1 = CALL_EXPR_ARG (exp, 1);
6958 if (TREE_CODE (arg0) != INTEGER_CST)
6960 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6961 return const0_rtx;
6964 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6965 if (size == boolean_true_node)
6966 return const1_rtx;
6967 return const0_rtx;
6970 /* Return boolean_true_node if it can be determined that the object ARG1 of
6971 size ARG0 is always lock free on this architecture, otherwise NULL_TREE. */
6973 static tree
6974 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6976 if (!flag_inline_atomics)
6977 return NULL_TREE;
6979 /* If it isn't always lock free, don't generate a result. */
6980 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6981 return boolean_true_node;
6983 return NULL_TREE;
6986 /* Return true if the parameters to call EXP represent an object which will
6987 always generate lock free instructions. The first argument represents the
6988 size of the object, and the second parameter is a pointer to the object
6989 itself. If NULL is passed for the object, then the result is based on
6990 typical alignment for an object of the specified size. Otherwise return
6991 NULL. */
6993 static rtx
6994 expand_builtin_atomic_is_lock_free (tree exp)
6996 tree size;
6997 tree arg0 = CALL_EXPR_ARG (exp, 0);
6998 tree arg1 = CALL_EXPR_ARG (exp, 1);
7000 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7002 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7003 return NULL_RTX;
7006 if (!flag_inline_atomics)
7007 return NULL_RTX;
7009 /* If the value is known at compile time, return the RTX for it. */
7010 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7011 if (size == boolean_true_node)
7012 return const1_rtx;
7014 return NULL_RTX;
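/* For example, a query whose answer is not provable at compile time, such as

     _Bool probe (void *p) { return __atomic_is_lock_free (16, p); }

   on a target without a known 16-byte compare-and-swap, is not folded here;
   NULL_RTX is returned, the call is emitted normally, and it resolves to the
   __atomic_is_lock_free entry point in libatomic at run time.  */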
7017 /* Expand the __atomic_thread_fence intrinsic:
7018 void __atomic_thread_fence (enum memmodel)
7019 EXP is the CALL_EXPR. */
7021 static void
7022 expand_builtin_atomic_thread_fence (tree exp)
7024 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7025 expand_mem_thread_fence (model);
7028 /* Expand the __atomic_signal_fence intrinsic:
7029 void __atomic_signal_fence (enum memmodel)
7030 EXP is the CALL_EXPR. */
7032 static void
7033 expand_builtin_atomic_signal_fence (tree exp)
7035 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7036 expand_mem_signal_fence (model);
7039 /* Expand the __sync_synchronize intrinsic. */
7041 static void
7042 expand_builtin_sync_synchronize (void)
7044 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7047 static rtx
7048 expand_builtin_thread_pointer (tree exp, rtx target)
7050 enum insn_code icode;
7051 if (!validate_arglist (exp, VOID_TYPE))
7052 return const0_rtx;
7053 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7054 if (icode != CODE_FOR_nothing)
7056 class expand_operand op;
7057 /* If the target is not suitable then create a new target. */
7058 if (target == NULL_RTX
7059 || !REG_P (target)
7060 || GET_MODE (target) != Pmode)
7061 target = gen_reg_rtx (Pmode);
7062 create_output_operand (&op, target, Pmode);
7063 expand_insn (icode, 1, &op);
7064 return target;
7066 error ("%<__builtin_thread_pointer%> is not supported on this target");
7067 return const0_rtx;
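/* For example, on targets that provide get_thread_pointer_optab,

     void *tcb = __builtin_thread_pointer ();

   expands to a single read of the thread-pointer register (the TLS base);
   on other targets the error above is reported instead.  */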
7070 static void
7071 expand_builtin_set_thread_pointer (tree exp)
7073 enum insn_code icode;
7074 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7075 return;
7076 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7077 if (icode != CODE_FOR_nothing)
7079 class expand_operand op;
7080 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7081 Pmode, EXPAND_NORMAL);
7082 create_input_operand (&op, val, Pmode);
7083 expand_insn (icode, 1, &op);
7084 return;
7086 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7090 /* Emit code to restore the current value of stack. */
7092 static void
7093 expand_stack_restore (tree var)
7095 rtx_insn *prev;
7096 rtx sa = expand_normal (var);
7098 sa = convert_memory_address (Pmode, sa);
7100 prev = get_last_insn ();
7101 emit_stack_restore (SAVE_BLOCK, sa);
7103 record_new_stack_level ();
7105 fixup_args_size_notes (prev, get_last_insn (), 0);
7108 /* Emit code to save the current value of stack. */
7110 static rtx
7111 expand_stack_save (void)
7113 rtx ret = NULL_RTX;
7115 emit_stack_save (SAVE_BLOCK, &ret);
7116 return ret;
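/* These two expanders implement __builtin_stack_save and
   __builtin_stack_restore, which gimplification wraps around scopes that
   allocate variable-sized stack space.  Roughly, a block-scope VLA such as

     { char buf[n]; use (buf); }

   is lowered to

     void *sp.1 = __builtin_stack_save ();
     char buf[n]; use (buf);
     __builtin_stack_restore (sp.1);

   so the space is released when the block exits.  */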
7119 /* Emit code to get the openacc gang, worker or vector id or size. */
7121 static rtx
7122 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7124 const char *name;
7125 rtx fallback_retval;
7126 rtx_insn *(*gen_fn) (rtx, rtx);
7127 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7129 case BUILT_IN_GOACC_PARLEVEL_ID:
7130 name = "__builtin_goacc_parlevel_id";
7131 fallback_retval = const0_rtx;
7132 gen_fn = targetm.gen_oacc_dim_pos;
7133 break;
7134 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7135 name = "__builtin_goacc_parlevel_size";
7136 fallback_retval = const1_rtx;
7137 gen_fn = targetm.gen_oacc_dim_size;
7138 break;
7139 default:
7140 gcc_unreachable ();
7143 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7145 error ("%qs only supported in OpenACC code", name);
7146 return const0_rtx;
7149 tree arg = CALL_EXPR_ARG (exp, 0);
7150 if (TREE_CODE (arg) != INTEGER_CST)
7152 error ("non-constant argument 0 to %qs", name);
7153 return const0_rtx;
7156 int dim = TREE_INT_CST_LOW (arg);
7157 switch (dim)
7159 case GOMP_DIM_GANG:
7160 case GOMP_DIM_WORKER:
7161 case GOMP_DIM_VECTOR:
7162 break;
7163 default:
7164 error ("illegal argument 0 to %qs", name);
7165 return const0_rtx;
7168 if (ignore)
7169 return target;
7171 if (target == NULL_RTX)
7172 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7174 if (!targetm.have_oacc_dim_size ())
7176 emit_move_insn (target, fallback_retval);
7177 return target;
7180 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7181 emit_insn (gen_fn (reg, GEN_INT (dim)));
7182 if (reg != target)
7183 emit_move_insn (target, reg);
7185 return target;
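/* For example, inside an OpenACC offloaded function

     int g = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);

   expands via the target's oacc_dim_pos pattern when one exists, and
   otherwise receives the fallback value chosen above (0 for the id variant,
   1 for the size variant); the dimension argument must be a compile-time
   constant, as checked above.  */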
7188 /* Expand a string compare operation using a sequence of char comparisons
7189 to get rid of the calling overhead, with result going to TARGET if
7190 that's convenient.
7192 VAR_STR is the variable string source;
7193 CONST_STR is the constant string source;
7194 LENGTH is the number of chars to compare;
7195 CONST_STR_N indicates which source string is the constant string;
7196 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7198 The call is expanded to the following (assuming const_str_n is 2, i.e., arg2 is the constant string):
7200 target = (int) (unsigned char) var_str[0]
7201 - (int) (unsigned char) const_str[0];
7202 if (target != 0)
7203 goto ne_label;
7205 target = (int) (unsigned char) var_str[length - 2]
7206 - (int) (unsigned char) const_str[length - 2];
7207 if (target != 0)
7208 goto ne_label;
7209 target = (int) (unsigned char) var_str[length - 1]
7210 - (int) (unsigned char) const_str[length - 1];
7211 ne_label:
7214 static rtx
7215 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7216 unsigned HOST_WIDE_INT length,
7217 int const_str_n, machine_mode mode)
7219 HOST_WIDE_INT offset = 0;
7220 rtx var_rtx_array
7221 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7222 rtx var_rtx = NULL_RTX;
7223 rtx const_rtx = NULL_RTX;
7224 rtx result = target ? target : gen_reg_rtx (mode);
7225 rtx_code_label *ne_label = gen_label_rtx ();
7226 tree unit_type_node = unsigned_char_type_node;
7227 scalar_int_mode unit_mode
7228 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7230 start_sequence ();
7232 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7234 var_rtx
7235 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7236 const_rtx = c_readstr (const_str + offset, unit_mode);
7237 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7238 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7240 op0 = convert_modes (mode, unit_mode, op0, 1);
7241 op1 = convert_modes (mode, unit_mode, op1, 1);
7242 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7243 result, 1, OPTAB_WIDEN);
7245 /* Force the difference into result register. We cannot reassign
7246 result here ("result = diff") or we may end up returning
7247 uninitialized result when expand_simple_binop allocates a new
7248 pseudo-register for returning. */
7249 if (diff != result)
7250 emit_move_insn (result, diff);
7252 if (i < length - 1)
7253 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7254 mode, true, ne_label);
7255 offset += GET_MODE_SIZE (unit_mode);
7258 emit_label (ne_label);
7259 rtx_insn *insns = get_insns ();
7260 end_sequence ();
7261 emit_insn (insns);
7263 return result;
7266 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7267 to TARGET if that's convenient.
7268 If the call has not been inlined, return NULL_RTX. */
7270 static rtx
7271 inline_expand_builtin_bytecmp (tree exp, rtx target)
7273 tree fndecl = get_callee_fndecl (exp);
7274 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7275 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7277 /* Do NOT apply this inlining expansion when optimizing for size or
7278 optimization level below 2 or if unused *cmp hasn't been DCEd. */
7279 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7280 return NULL_RTX;
7282 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7283 || fcode == BUILT_IN_STRNCMP
7284 || fcode == BUILT_IN_MEMCMP);
7286 /* On a target where the type of the call (int) has the same or narrower precision
7287 than unsigned char, give up the inlining expansion. */
7288 if (TYPE_PRECISION (unsigned_char_type_node)
7289 >= TYPE_PRECISION (TREE_TYPE (exp)))
7290 return NULL_RTX;
7292 tree arg1 = CALL_EXPR_ARG (exp, 0);
7293 tree arg2 = CALL_EXPR_ARG (exp, 1);
7294 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7296 unsigned HOST_WIDE_INT len1 = 0;
7297 unsigned HOST_WIDE_INT len2 = 0;
7298 unsigned HOST_WIDE_INT len3 = 0;
7300 /* Get the object representation of the initializers of ARG1 and ARG2
7301 as strings, provided they refer to constant objects, with their byte
7302 sizes in LEN1 and LEN2, respectively. */
7303 const char *bytes1 = getbyterep (arg1, &len1);
7304 const char *bytes2 = getbyterep (arg2, &len2);
7306 /* Fail if neither argument refers to an initialized constant. */
7307 if (!bytes1 && !bytes2)
7308 return NULL_RTX;
7310 if (is_ncmp)
7312 /* Fail if the memcmp/strncmp bound is not a constant. */
7313 if (!tree_fits_uhwi_p (len3_tree))
7314 return NULL_RTX;
7316 len3 = tree_to_uhwi (len3_tree);
7318 if (fcode == BUILT_IN_MEMCMP)
7320 /* Fail if the memcmp bound is greater than the size of either
7321 of the two constant objects. */
7322 if ((bytes1 && len1 < len3)
7323 || (bytes2 && len2 < len3))
7324 return NULL_RTX;
7328 if (fcode != BUILT_IN_MEMCMP)
7330 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7331 and LEN2 to the length of the nul-terminated string stored
7332 in each. */
7333 if (bytes1 != NULL)
7334 len1 = strnlen (bytes1, len1) + 1;
7335 if (bytes2 != NULL)
7336 len2 = strnlen (bytes2, len2) + 1;
7339 /* See inline_string_cmp. */
7340 int const_str_n;
7341 if (!len1)
7342 const_str_n = 2;
7343 else if (!len2)
7344 const_str_n = 1;
7345 else if (len2 > len1)
7346 const_str_n = 1;
7347 else
7348 const_str_n = 2;
7350 /* For strncmp only, compute the new bound as the smallest of
7351 the lengths of the two strings (plus 1) and the bound provided
7352 to the function. */
7353 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7354 if (is_ncmp && len3 < bound)
7355 bound = len3;
7357 /* If the bound of the comparison is larger than the threshold,
7358 do nothing. */
7359 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7360 return NULL_RTX;
7362 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7364 /* Now, start inline expansion of the call. */
7365 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7366 (const_str_n == 1) ? bytes1 : bytes2, bound,
7367 const_str_n, mode);
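/* For example, at -O2 with the default value of
   --param builtin-string-cmp-inline-length (3), a call like

     int cmp (const char *s) { return strcmp (s, "ab"); }

   has a constant argument whose nul-terminated length is 3, so it is
   expanded by inline_string_cmp into at most three byte subtractions with
   early exits rather than a library call, provided the result is used and
   we are not optimizing for size.  */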
7370 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7371 represents the size of the first argument to that call, or VOIDmode
7372 if the argument is a pointer. IGNORE will be true if the result
7373 isn't used. */
7374 static rtx
7375 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7376 bool ignore)
7378 rtx val, failsafe;
7379 unsigned nargs = call_expr_nargs (exp);
7381 tree arg0 = CALL_EXPR_ARG (exp, 0);
7383 if (mode == VOIDmode)
7385 mode = TYPE_MODE (TREE_TYPE (arg0));
7386 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7389 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7391 /* An optional second argument can be used as a failsafe value on
7392 some machines. If it isn't present, then the failsafe value is
7393 assumed to be 0. */
7394 if (nargs > 1)
7396 tree arg1 = CALL_EXPR_ARG (exp, 1);
7397 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7399 else
7400 failsafe = const0_rtx;
7402 /* If the result isn't used, the behavior is undefined. It would be
7403 nice to emit a warning here, but path splitting means this might
7404 happen with legitimate code. So simply drop the builtin
7405 expansion in that case; we've handled any side-effects above. */
7406 if (ignore)
7407 return const0_rtx;
7409 /* If we don't have a suitable target, create one to hold the result. */
7410 if (target == NULL || GET_MODE (target) != mode)
7411 target = gen_reg_rtx (mode);
7413 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7414 val = convert_modes (mode, VOIDmode, val, false);
7416 return targetm.speculation_safe_value (mode, target, val, failsafe);
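/* For example, a bounds-checked access can be hardened against Spectre-style
   misspeculation with

     if (idx < len)
       x = array[__builtin_speculation_safe_value (idx)];

   where the builtin yields IDX on the architecturally executed path but the
   failsafe value (0 unless a second argument is given) under misspeculation,
   using whatever barrier the target's speculation_safe_value hook emits.  */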
7419 /* Expand an expression EXP that calls a built-in function,
7420 with result going to TARGET if that's convenient
7421 (and in mode MODE if that's convenient).
7422 SUBTARGET may be used as the target for computing one of EXP's operands.
7423 IGNORE is nonzero if the value is to be ignored. */
7426 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7427 int ignore)
7429 tree fndecl = get_callee_fndecl (exp);
7430 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7431 int flags;
7433 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7434 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7436 /* When ASan is enabled, we don't want to expand some memory/string
7437 builtins and rely on libsanitizer's hooks. This allows us to avoid
7438 redundant checks and be sure that possible overflows will be detected
7439 by ASan. */
7441 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7442 if (param_asan_kernel_mem_intrinsic_prefix
7443 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7444 | SANITIZE_KERNEL_HWADDRESS))
7445 switch (fcode)
7447 rtx save_decl_rtl, ret;
7448 case BUILT_IN_MEMCPY:
7449 case BUILT_IN_MEMMOVE:
7450 case BUILT_IN_MEMSET:
7451 save_decl_rtl = DECL_RTL (fndecl);
7452 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7453 ret = expand_call (exp, target, ignore);
7454 DECL_RTL (fndecl) = save_decl_rtl;
7455 return ret;
7456 default:
7457 break;
7459 if (sanitize_flags_p (SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7460 return expand_call (exp, target, ignore);
7462 /* When not optimizing, generate calls to library functions for a certain
7463 set of builtins. */
7464 if (!optimize
7465 && !called_as_built_in (fndecl)
7466 && fcode != BUILT_IN_FORK
7467 && fcode != BUILT_IN_EXECL
7468 && fcode != BUILT_IN_EXECV
7469 && fcode != BUILT_IN_EXECLP
7470 && fcode != BUILT_IN_EXECLE
7471 && fcode != BUILT_IN_EXECVP
7472 && fcode != BUILT_IN_EXECVE
7473 && fcode != BUILT_IN_CLEAR_CACHE
7474 && !ALLOCA_FUNCTION_CODE_P (fcode)
7475 && fcode != BUILT_IN_FREE
7476 && (fcode != BUILT_IN_MEMSET
7477 || !(flag_inline_stringops & ILSOP_MEMSET))
7478 && (fcode != BUILT_IN_MEMCPY
7479 || !(flag_inline_stringops & ILSOP_MEMCPY))
7480 && (fcode != BUILT_IN_MEMMOVE
7481 || !(flag_inline_stringops & ILSOP_MEMMOVE))
7482 && (fcode != BUILT_IN_MEMCMP
7483 || !(flag_inline_stringops & ILSOP_MEMCMP)))
7484 return expand_call (exp, target, ignore);
7486 /* The built-in function expanders test for target == const0_rtx
7487 to determine whether the function's result will be ignored. */
7488 if (ignore)
7489 target = const0_rtx;
7491 /* If the result of a pure or const built-in function is ignored, and
7492 none of its arguments are volatile, we can avoid expanding the
7493 built-in call and just evaluate the arguments for side-effects. */
7494 if (target == const0_rtx
7495 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7496 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7498 bool volatilep = false;
7499 tree arg;
7500 call_expr_arg_iterator iter;
7502 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7503 if (TREE_THIS_VOLATILE (arg))
7505 volatilep = true;
7506 break;
7509 if (! volatilep)
7511 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7512 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7513 return const0_rtx;
7517 switch (fcode)
7519 CASE_FLT_FN (BUILT_IN_FABS):
7520 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7521 case BUILT_IN_FABSD32:
7522 case BUILT_IN_FABSD64:
7523 case BUILT_IN_FABSD128:
7524 target = expand_builtin_fabs (exp, target, subtarget);
7525 if (target)
7526 return target;
7527 break;
7529 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7530 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7531 target = expand_builtin_copysign (exp, target, subtarget);
7532 if (target)
7533 return target;
7534 break;
7536 /* Just do a normal library call if we were unable to fold
7537 the values. */
7538 CASE_FLT_FN (BUILT_IN_CABS):
7539 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7540 break;
7542 CASE_FLT_FN (BUILT_IN_FMA):
7543 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7544 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7545 if (target)
7546 return target;
7547 break;
7549 CASE_FLT_FN (BUILT_IN_ILOGB):
7550 if (! flag_unsafe_math_optimizations)
7551 break;
7552 gcc_fallthrough ();
7553 CASE_FLT_FN (BUILT_IN_ISINF):
7554 CASE_FLT_FN (BUILT_IN_FINITE):
7555 case BUILT_IN_ISFINITE:
7556 case BUILT_IN_ISNORMAL:
7557 target = expand_builtin_interclass_mathfn (exp, target);
7558 if (target)
7559 return target;
7560 break;
7562 case BUILT_IN_ISSIGNALING:
7563 target = expand_builtin_issignaling (exp, target);
7564 if (target)
7565 return target;
7566 break;
7568 CASE_FLT_FN (BUILT_IN_ICEIL):
7569 CASE_FLT_FN (BUILT_IN_LCEIL):
7570 CASE_FLT_FN (BUILT_IN_LLCEIL):
7571 CASE_FLT_FN (BUILT_IN_LFLOOR):
7572 CASE_FLT_FN (BUILT_IN_IFLOOR):
7573 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7574 target = expand_builtin_int_roundingfn (exp, target);
7575 if (target)
7576 return target;
7577 break;
7579 CASE_FLT_FN (BUILT_IN_IRINT):
7580 CASE_FLT_FN (BUILT_IN_LRINT):
7581 CASE_FLT_FN (BUILT_IN_LLRINT):
7582 CASE_FLT_FN (BUILT_IN_IROUND):
7583 CASE_FLT_FN (BUILT_IN_LROUND):
7584 CASE_FLT_FN (BUILT_IN_LLROUND):
7585 target = expand_builtin_int_roundingfn_2 (exp, target);
7586 if (target)
7587 return target;
7588 break;
7590 CASE_FLT_FN (BUILT_IN_POWI):
7591 target = expand_builtin_powi (exp, target);
7592 if (target)
7593 return target;
7594 break;
7596 CASE_FLT_FN (BUILT_IN_CEXPI):
7597 target = expand_builtin_cexpi (exp, target);
7598 gcc_assert (target);
7599 return target;
7601 CASE_FLT_FN (BUILT_IN_SIN):
7602 CASE_FLT_FN (BUILT_IN_COS):
7603 if (! flag_unsafe_math_optimizations)
7604 break;
7605 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7606 if (target)
7607 return target;
7608 break;
7610 CASE_FLT_FN (BUILT_IN_SINCOS):
7611 if (! flag_unsafe_math_optimizations)
7612 break;
7613 target = expand_builtin_sincos (exp);
7614 if (target)
7615 return target;
7616 break;
7618 case BUILT_IN_FEGETROUND:
7619 target = expand_builtin_fegetround (exp, target, target_mode);
7620 if (target)
7621 return target;
7622 break;
7624 case BUILT_IN_FECLEAREXCEPT:
7625 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7626 feclearexcept_optab);
7627 if (target)
7628 return target;
7629 break;
7631 case BUILT_IN_FERAISEEXCEPT:
7632 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7633 feraiseexcept_optab);
7634 if (target)
7635 return target;
7636 break;
7638 case BUILT_IN_APPLY_ARGS:
7639 return expand_builtin_apply_args ();
7641 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7642 FUNCTION with a copy of the parameters described by
7643 ARGUMENTS, and ARGSIZE. It returns a block of memory
7644 allocated on the stack into which is stored all the registers
7645 that might possibly be used for returning the result of a
7646 function. ARGUMENTS is the value returned by
7647 __builtin_apply_args. ARGSIZE is the number of bytes of
7648 arguments that must be copied. ??? How should this value be
7649 computed? We'll also need a safe worst case value for varargs
7650 functions. */
7651 case BUILT_IN_APPLY:
7652 if (!validate_arglist (exp, POINTER_TYPE,
7653 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7654 && !validate_arglist (exp, REFERENCE_TYPE,
7655 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7656 return const0_rtx;
7657 else
7659 rtx ops[3];
7661 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7662 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7663 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7665 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7668 /* __builtin_return (RESULT) causes the function to return the
7669 value described by RESULT. RESULT is address of the block of
7670 memory returned by __builtin_apply. */
7671 case BUILT_IN_RETURN:
7672 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7673 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7674 return const0_rtx;
7676 case BUILT_IN_SAVEREGS:
7677 return expand_builtin_saveregs ();
7679 case BUILT_IN_VA_ARG_PACK:
7680 /* All valid uses of __builtin_va_arg_pack () are removed during
7681 inlining. */
7682 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7683 return const0_rtx;
7685 case BUILT_IN_VA_ARG_PACK_LEN:
7686 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7687 inlining. */
7688 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7689 return const0_rtx;
7691 /* Return the address of the first anonymous stack arg. */
7692 case BUILT_IN_NEXT_ARG:
7693 if (fold_builtin_next_arg (exp, false))
7694 return const0_rtx;
7695 return expand_builtin_next_arg ();
7697 case BUILT_IN_CLEAR_CACHE:
7698 expand_builtin___clear_cache (exp);
7699 return const0_rtx;
7701 case BUILT_IN_CLASSIFY_TYPE:
7702 return expand_builtin_classify_type (exp);
7704 case BUILT_IN_CONSTANT_P:
7705 return const0_rtx;
7707 case BUILT_IN_FRAME_ADDRESS:
7708 case BUILT_IN_RETURN_ADDRESS:
7709 return expand_builtin_frame_address (fndecl, exp);
7711 /* Returns the address of the area where the structure is returned.
7712 0 otherwise. */
7713 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7714 if (call_expr_nargs (exp) != 0
7715 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7716 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7717 return const0_rtx;
7718 else
7719 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7721 CASE_BUILT_IN_ALLOCA:
7722 target = expand_builtin_alloca (exp);
7723 if (target)
7724 return target;
7725 break;
7727 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7728 return expand_asan_emit_allocas_unpoison (exp);
7730 case BUILT_IN_STACK_SAVE:
7731 return expand_stack_save ();
7733 case BUILT_IN_STACK_RESTORE:
7734 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7735 return const0_rtx;
7737 case BUILT_IN_BSWAP16:
7738 case BUILT_IN_BSWAP32:
7739 case BUILT_IN_BSWAP64:
7740 case BUILT_IN_BSWAP128:
7741 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7742 if (target)
7743 return target;
7744 break;
7746 CASE_INT_FN (BUILT_IN_FFS):
7747 target = expand_builtin_unop (target_mode, exp, target,
7748 subtarget, ffs_optab);
7749 if (target)
7750 return target;
7751 break;
7753 CASE_INT_FN (BUILT_IN_CLZ):
7754 target = expand_builtin_unop (target_mode, exp, target,
7755 subtarget, clz_optab);
7756 if (target)
7757 return target;
7758 break;
7760 CASE_INT_FN (BUILT_IN_CTZ):
7761 target = expand_builtin_unop (target_mode, exp, target,
7762 subtarget, ctz_optab);
7763 if (target)
7764 return target;
7765 break;
7767 CASE_INT_FN (BUILT_IN_CLRSB):
7768 target = expand_builtin_unop (target_mode, exp, target,
7769 subtarget, clrsb_optab);
7770 if (target)
7771 return target;
7772 break;
7774 CASE_INT_FN (BUILT_IN_POPCOUNT):
7775 target = expand_builtin_unop (target_mode, exp, target,
7776 subtarget, popcount_optab);
7777 if (target)
7778 return target;
7779 break;
7781 CASE_INT_FN (BUILT_IN_PARITY):
7782 target = expand_builtin_unop (target_mode, exp, target,
7783 subtarget, parity_optab);
7784 if (target)
7785 return target;
7786 break;
7788 case BUILT_IN_STRLEN:
7789 target = expand_builtin_strlen (exp, target, target_mode);
7790 if (target)
7791 return target;
7792 break;
7794 case BUILT_IN_STRNLEN:
7795 target = expand_builtin_strnlen (exp, target, target_mode);
7796 if (target)
7797 return target;
7798 break;
7800 case BUILT_IN_STRCPY:
7801 target = expand_builtin_strcpy (exp, target);
7802 if (target)
7803 return target;
7804 break;
7806 case BUILT_IN_STRNCPY:
7807 target = expand_builtin_strncpy (exp, target);
7808 if (target)
7809 return target;
7810 break;
7812 case BUILT_IN_STPCPY:
7813 target = expand_builtin_stpcpy (exp, target, mode);
7814 if (target)
7815 return target;
7816 break;
7818 case BUILT_IN_MEMCPY:
7819 target = expand_builtin_memcpy (exp, target);
7820 if (target)
7821 return target;
7822 break;
7824 case BUILT_IN_MEMMOVE:
7825 target = expand_builtin_memmove (exp, target);
7826 if (target)
7827 return target;
7828 break;
7830 case BUILT_IN_MEMPCPY:
7831 target = expand_builtin_mempcpy (exp, target);
7832 if (target)
7833 return target;
7834 break;
7836 case BUILT_IN_MEMSET:
7837 target = expand_builtin_memset (exp, target, mode);
7838 if (target)
7839 return target;
7840 break;
7842 case BUILT_IN_BZERO:
7843 target = expand_builtin_bzero (exp);
7844 if (target)
7845 return target;
7846 break;
7848 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7849 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7850 when changing it to a strcmp call. */
7851 case BUILT_IN_STRCMP_EQ:
7852 target = expand_builtin_memcmp (exp, target, true);
7853 if (target)
7854 return target;
7856 /* Change this call back to a BUILT_IN_STRCMP. */
7857 TREE_OPERAND (exp, 1)
7858 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7860 /* Delete the last parameter. */
7861 unsigned int i;
7862 vec<tree, va_gc> *arg_vec;
7863 vec_alloc (arg_vec, 2);
7864 for (i = 0; i < 2; i++)
7865 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7866 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7867 /* FALLTHROUGH */
7869 case BUILT_IN_STRCMP:
7870 target = expand_builtin_strcmp (exp, target);
7871 if (target)
7872 return target;
7873 break;
7875 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7876 back to a BUILT_IN_STRNCMP. */
7877 case BUILT_IN_STRNCMP_EQ:
7878 target = expand_builtin_memcmp (exp, target, true);
7879 if (target)
7880 return target;
7882 /* Change it back to a BUILT_IN_STRNCMP. */
7883 TREE_OPERAND (exp, 1)
7884 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7885 /* FALLTHROUGH */
7887 case BUILT_IN_STRNCMP:
7888 target = expand_builtin_strncmp (exp, target, mode);
7889 if (target)
7890 return target;
7891 break;
7893 case BUILT_IN_BCMP:
7894 case BUILT_IN_MEMCMP:
7895 case BUILT_IN_MEMCMP_EQ:
7896 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7897 if (target)
7898 return target;
7899 if (fcode == BUILT_IN_MEMCMP_EQ)
7901 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7902 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7904 break;
7906 case BUILT_IN_SETJMP:
7907 /* This should have been lowered to the builtins below. */
7908 gcc_unreachable ();
7910 case BUILT_IN_SETJMP_SETUP:
7911 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7912 and the receiver label. */
7913 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7915 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7916 VOIDmode, EXPAND_NORMAL);
7917 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7918 rtx_insn *label_r = label_rtx (label);
7920 expand_builtin_setjmp_setup (buf_addr, label_r);
7921 return const0_rtx;
7923 break;
7925 case BUILT_IN_SETJMP_RECEIVER:
7926 /* __builtin_setjmp_receiver is passed the receiver label. */
7927 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7929 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7930 rtx_insn *label_r = label_rtx (label);
7932 expand_builtin_setjmp_receiver (label_r);
7933 nonlocal_goto_handler_labels
7934 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7935 nonlocal_goto_handler_labels);
7936 /* ??? Do not let expand_label treat us as such since we would
7937 not want to be both on the list of non-local labels and on
7938 the list of forced labels. */
7939 FORCED_LABEL (label) = 0;
7940 return const0_rtx;
7942 break;
7944 /* __builtin_longjmp is passed a pointer to an array of five words.
7945 It's similar to the C library longjmp function but works with
7946 __builtin_setjmp above. */
7947 case BUILT_IN_LONGJMP:
7948 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7950 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7951 VOIDmode, EXPAND_NORMAL);
7952 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7954 if (value != const1_rtx)
7956 error ("%<__builtin_longjmp%> second argument must be 1");
7957 return const0_rtx;
7960 expand_builtin_longjmp (buf_addr, value);
7961 return const0_rtx;
7963 break;
7965 case BUILT_IN_NONLOCAL_GOTO:
7966 target = expand_builtin_nonlocal_goto (exp);
7967 if (target)
7968 return target;
7969 break;
7971 /* This updates the setjmp buffer that is its argument with the value
7972 of the current stack pointer. */
7973 case BUILT_IN_UPDATE_SETJMP_BUF:
7974 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7976 rtx buf_addr
7977 = expand_normal (CALL_EXPR_ARG (exp, 0));
7979 expand_builtin_update_setjmp_buf (buf_addr);
7980 return const0_rtx;
7982 break;
7984 case BUILT_IN_TRAP:
7985 case BUILT_IN_UNREACHABLE_TRAP:
7986 expand_builtin_trap ();
7987 return const0_rtx;
7989 case BUILT_IN_UNREACHABLE:
7990 expand_builtin_unreachable ();
7991 return const0_rtx;
7993 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7994 case BUILT_IN_SIGNBITD32:
7995 case BUILT_IN_SIGNBITD64:
7996 case BUILT_IN_SIGNBITD128:
7997 target = expand_builtin_signbit (exp, target);
7998 if (target)
7999 return target;
8000 break;
8002 /* Various hooks for the DWARF 2 __throw routine. */
8003 case BUILT_IN_UNWIND_INIT:
8004 expand_builtin_unwind_init ();
8005 return const0_rtx;
8006 case BUILT_IN_DWARF_CFA:
8007 return virtual_cfa_rtx;
8008 #ifdef DWARF2_UNWIND_INFO
8009 case BUILT_IN_DWARF_SP_COLUMN:
8010 return expand_builtin_dwarf_sp_column ();
8011 case BUILT_IN_INIT_DWARF_REG_SIZES:
8012 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8013 return const0_rtx;
8014 #endif
8015 case BUILT_IN_FROB_RETURN_ADDR:
8016 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8017 case BUILT_IN_EXTRACT_RETURN_ADDR:
8018 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8019 case BUILT_IN_EH_RETURN:
8020 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8021 CALL_EXPR_ARG (exp, 1));
8022 return const0_rtx;
8023 case BUILT_IN_EH_RETURN_DATA_REGNO:
8024 return expand_builtin_eh_return_data_regno (exp);
8025 case BUILT_IN_EXTEND_POINTER:
8026 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8027 case BUILT_IN_EH_POINTER:
8028 return expand_builtin_eh_pointer (exp);
8029 case BUILT_IN_EH_FILTER:
8030 return expand_builtin_eh_filter (exp);
8031 case BUILT_IN_EH_COPY_VALUES:
8032 return expand_builtin_eh_copy_values (exp);
8034 case BUILT_IN_VA_START:
8035 return expand_builtin_va_start (exp);
8036 case BUILT_IN_VA_END:
8037 return expand_builtin_va_end (exp);
8038 case BUILT_IN_VA_COPY:
8039 return expand_builtin_va_copy (exp);
8040 case BUILT_IN_EXPECT:
8041 return expand_builtin_expect (exp, target);
8042 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8043 return expand_builtin_expect_with_probability (exp, target);
8044 case BUILT_IN_ASSUME_ALIGNED:
8045 return expand_builtin_assume_aligned (exp, target);
8046 case BUILT_IN_PREFETCH:
8047 expand_builtin_prefetch (exp);
8048 return const0_rtx;
8050 case BUILT_IN_INIT_TRAMPOLINE:
8051 return expand_builtin_init_trampoline (exp, true);
8052 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8053 return expand_builtin_init_trampoline (exp, false);
8054 case BUILT_IN_ADJUST_TRAMPOLINE:
8055 return expand_builtin_adjust_trampoline (exp);
8057 case BUILT_IN_INIT_DESCRIPTOR:
8058 return expand_builtin_init_descriptor (exp);
8059 case BUILT_IN_ADJUST_DESCRIPTOR:
8060 return expand_builtin_adjust_descriptor (exp);
8062 case BUILT_IN_FORK:
8063 case BUILT_IN_EXECL:
8064 case BUILT_IN_EXECV:
8065 case BUILT_IN_EXECLP:
8066 case BUILT_IN_EXECLE:
8067 case BUILT_IN_EXECVP:
8068 case BUILT_IN_EXECVE:
8069 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8070 if (target)
8071 return target;
8072 break;
8074 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8075 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8076 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8077 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8078 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8079 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8080 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8081 if (target)
8082 return target;
8083 break;
8085 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8086 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8087 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8088 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8089 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8090 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8091 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8092 if (target)
8093 return target;
8094 break;
8096 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8097 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8098 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8099 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8100 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8101 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8102 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8103 if (target)
8104 return target;
8105 break;
8107 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8108 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8109 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8110 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8111 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8112 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8113 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8114 if (target)
8115 return target;
8116 break;
8118 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8119 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8120 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8121 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8122 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8123 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8124 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8125 if (target)
8126 return target;
8127 break;
8129 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8130 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8131 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8132 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8133 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8134 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8135 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8136 if (target)
8137 return target;
8138 break;
8140 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8141 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8142 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8143 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8144 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8145 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8146 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8147 if (target)
8148 return target;
8149 break;
8151 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8152 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8153 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8154 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8155 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8156 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8157 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8158 if (target)
8159 return target;
8160 break;
8162 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8163 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8164 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8165 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8166 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8167 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8168 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8169 if (target)
8170 return target;
8171 break;
8173 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8174 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8175 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8176 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8177 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8178 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8179 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8180 if (target)
8181 return target;
8182 break;
8184 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8185 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8186 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8187 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8188 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8189 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8190 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8191 if (target)
8192 return target;
8193 break;
8195 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8196 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8197 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8198 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8199 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8200 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8201 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8202 if (target)
8203 return target;
8204 break;
8206 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8207 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8208 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8209 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8210 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8211 if (mode == VOIDmode)
8212 mode = TYPE_MODE (boolean_type_node);
8213 if (!target || !register_operand (target, mode))
8214 target = gen_reg_rtx (mode);
8216 mode = get_builtin_sync_mode
8217 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8218 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8219 if (target)
8220 return target;
8221 break;
8223 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8224 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8225 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8226 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8227 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8228 mode = get_builtin_sync_mode
8229 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8230 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8231 if (target)
8232 return target;
8233 break;
8235 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8236 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8237 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8238 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8239 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8240 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8241 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8242 if (target)
8243 return target;
8244 break;
8246 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8247 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8248 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8249 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8250 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8252 expand_builtin_sync_lock_release (mode, exp);
8253 return const0_rtx;
8255 case BUILT_IN_SYNC_SYNCHRONIZE:
8256 expand_builtin_sync_synchronize ();
8257 return const0_rtx;
8259 case BUILT_IN_ATOMIC_EXCHANGE_1:
8260 case BUILT_IN_ATOMIC_EXCHANGE_2:
8261 case BUILT_IN_ATOMIC_EXCHANGE_4:
8262 case BUILT_IN_ATOMIC_EXCHANGE_8:
8263 case BUILT_IN_ATOMIC_EXCHANGE_16:
8264 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8265 target = expand_builtin_atomic_exchange (mode, exp, target);
8266 if (target)
8267 return target;
8268 break;
8270 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8271 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8272 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8273 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8274 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8276 unsigned int nargs, z;
8277 vec<tree, va_gc> *vec;
8279 mode =
8280 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8281 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8282 if (target)
8283 return target;
8285 /* If this is turned into an external library call, the weak parameter
8286 must be dropped to match the expected parameter list. */
8287 nargs = call_expr_nargs (exp);
8288 vec_alloc (vec, nargs - 1);
8289 for (z = 0; z < 3; z++)
8290 vec->quick_push (CALL_EXPR_ARG (exp, z));
8291 /* Skip the boolean weak parameter. */
8292 for (z = 4; z < 6; z++)
8293 vec->quick_push (CALL_EXPR_ARG (exp, z));
8294 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8295 break;
8298 case BUILT_IN_ATOMIC_LOAD_1:
8299 case BUILT_IN_ATOMIC_LOAD_2:
8300 case BUILT_IN_ATOMIC_LOAD_4:
8301 case BUILT_IN_ATOMIC_LOAD_8:
8302 case BUILT_IN_ATOMIC_LOAD_16:
8303 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8304 target = expand_builtin_atomic_load (mode, exp, target);
8305 if (target)
8306 return target;
8307 break;
8309 case BUILT_IN_ATOMIC_STORE_1:
8310 case BUILT_IN_ATOMIC_STORE_2:
8311 case BUILT_IN_ATOMIC_STORE_4:
8312 case BUILT_IN_ATOMIC_STORE_8:
8313 case BUILT_IN_ATOMIC_STORE_16:
8314 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8315 target = expand_builtin_atomic_store (mode, exp);
8316 if (target)
8317 return const0_rtx;
8318 break;
8320 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8321 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8322 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8323 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8324 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8326 enum built_in_function lib;
8327 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8328 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8329 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8330 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8331 ignore, lib);
8332 if (target)
8333 return target;
8334 break;
8336 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8337 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8338 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8339 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8340 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8342 enum built_in_function lib;
8343 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8344 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8345 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8346 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8347 ignore, lib);
8348 if (target)
8349 return target;
8350 break;
8352 case BUILT_IN_ATOMIC_AND_FETCH_1:
8353 case BUILT_IN_ATOMIC_AND_FETCH_2:
8354 case BUILT_IN_ATOMIC_AND_FETCH_4:
8355 case BUILT_IN_ATOMIC_AND_FETCH_8:
8356 case BUILT_IN_ATOMIC_AND_FETCH_16:
8358 enum built_in_function lib;
8359 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8360 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8361 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8362 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8363 ignore, lib);
8364 if (target)
8365 return target;
8366 break;
8368 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8369 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8370 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8371 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8372 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8374 enum built_in_function lib;
8375 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8376 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8377 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8378 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8379 ignore, lib);
8380 if (target)
8381 return target;
8382 break;
8384 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8385 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8386 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8387 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8388 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8390 enum built_in_function lib;
8391 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8392 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8393 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8394 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8395 ignore, lib);
8396 if (target)
8397 return target;
8398 break;
8400 case BUILT_IN_ATOMIC_OR_FETCH_1:
8401 case BUILT_IN_ATOMIC_OR_FETCH_2:
8402 case BUILT_IN_ATOMIC_OR_FETCH_4:
8403 case BUILT_IN_ATOMIC_OR_FETCH_8:
8404 case BUILT_IN_ATOMIC_OR_FETCH_16:
8406 enum built_in_function lib;
8407 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8408 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8409 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8410 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8411 ignore, lib);
8412 if (target)
8413 return target;
8414 break;
8416 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8417 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8418 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8419 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8420 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8421 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8422 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8423 ignore, BUILT_IN_NONE);
8424 if (target)
8425 return target;
8426 break;
8428 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8429 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8430 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8431 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8432 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8433 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8434 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8435 ignore, BUILT_IN_NONE);
8436 if (target)
8437 return target;
8438 break;
8440 case BUILT_IN_ATOMIC_FETCH_AND_1:
8441 case BUILT_IN_ATOMIC_FETCH_AND_2:
8442 case BUILT_IN_ATOMIC_FETCH_AND_4:
8443 case BUILT_IN_ATOMIC_FETCH_AND_8:
8444 case BUILT_IN_ATOMIC_FETCH_AND_16:
8445 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8446 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8447 ignore, BUILT_IN_NONE);
8448 if (target)
8449 return target;
8450 break;
8452 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8453 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8454 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8455 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8456 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8457 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8458 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8459 ignore, BUILT_IN_NONE);
8460 if (target)
8461 return target;
8462 break;
8464 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8465 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8466 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8467 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8468 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8469 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8470 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8471 ignore, BUILT_IN_NONE);
8472 if (target)
8473 return target;
8474 break;
8476 case BUILT_IN_ATOMIC_FETCH_OR_1:
8477 case BUILT_IN_ATOMIC_FETCH_OR_2:
8478 case BUILT_IN_ATOMIC_FETCH_OR_4:
8479 case BUILT_IN_ATOMIC_FETCH_OR_8:
8480 case BUILT_IN_ATOMIC_FETCH_OR_16:
8481 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8482 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8483 ignore, BUILT_IN_NONE);
8484 if (target)
8485 return target;
8486 break;
8488 case BUILT_IN_ATOMIC_TEST_AND_SET:
8489 target = expand_builtin_atomic_test_and_set (exp, target);
8490 if (target)
8491 return target;
8492 break;
8494 case BUILT_IN_ATOMIC_CLEAR:
8495 return expand_builtin_atomic_clear (exp);
8497 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8498 return expand_builtin_atomic_always_lock_free (exp);
8500 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8501 target = expand_builtin_atomic_is_lock_free (exp);
8502 if (target)
8503 return target;
8504 break;
8506 case BUILT_IN_ATOMIC_THREAD_FENCE:
8507 expand_builtin_atomic_thread_fence (exp);
8508 return const0_rtx;
8510 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8511 expand_builtin_atomic_signal_fence (exp);
8512 return const0_rtx;
8514 case BUILT_IN_OBJECT_SIZE:
8515 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8516 return expand_builtin_object_size (exp);
8518 case BUILT_IN_MEMCPY_CHK:
8519 case BUILT_IN_MEMPCPY_CHK:
8520 case BUILT_IN_MEMMOVE_CHK:
8521 case BUILT_IN_MEMSET_CHK:
8522 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8523 if (target)
8524 return target;
8525 break;
8527 case BUILT_IN_STRCPY_CHK:
8528 case BUILT_IN_STPCPY_CHK:
8529 case BUILT_IN_STRNCPY_CHK:
8530 case BUILT_IN_STPNCPY_CHK:
8531 case BUILT_IN_STRCAT_CHK:
8532 case BUILT_IN_STRNCAT_CHK:
8533 case BUILT_IN_SNPRINTF_CHK:
8534 case BUILT_IN_VSNPRINTF_CHK:
8535 maybe_emit_chk_warning (exp, fcode);
8536 break;
8538 case BUILT_IN_SPRINTF_CHK:
8539 case BUILT_IN_VSPRINTF_CHK:
8540 maybe_emit_sprintf_chk_warning (exp, fcode);
8541 break;
8543 case BUILT_IN_THREAD_POINTER:
8544 return expand_builtin_thread_pointer (exp, target);
8546 case BUILT_IN_SET_THREAD_POINTER:
8547 expand_builtin_set_thread_pointer (exp);
8548 return const0_rtx;
8550 case BUILT_IN_ACC_ON_DEVICE:
8551 /* Do a library call if we failed to expand the builtin when
8552 folding. */
8553 break;
8555 case BUILT_IN_GOACC_PARLEVEL_ID:
8556 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8557 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8559 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8560 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8562 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8563 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8564 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8565 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8566 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8568 return expand_speculation_safe_value (mode, exp, target, ignore);
8570 default: /* Just do a library call if the builtin is unknown. */
8571 break;
8574 /* The switch statement above can drop through to cause the function
8575 to be called normally. */
8576 return expand_call (exp, target, ignore);
8579 /* Determine whether a tree node represents a call to a built-in
8580 function. If the tree T is a call to a built-in function with
8581 the right number of arguments of the appropriate types, return
8582 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8583 Otherwise the return value is END_BUILTINS. */
8585 enum built_in_function
8586 builtin_mathfn_code (const_tree t)
8588 const_tree fndecl, arg, parmlist;
8589 const_tree argtype, parmtype;
8590 const_call_expr_arg_iterator iter;
8592 if (TREE_CODE (t) != CALL_EXPR)
8593 return END_BUILTINS;
8595 fndecl = get_callee_fndecl (t);
8596 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8597 return END_BUILTINS;
8599 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8600 init_const_call_expr_arg_iterator (t, &iter);
8601 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8603 /* If a function doesn't take a variable number of arguments,
8604 the last element in the list will have type `void'. */
8605 parmtype = TREE_VALUE (parmlist);
8606 if (VOID_TYPE_P (parmtype))
8608 if (more_const_call_expr_args_p (&iter))
8609 return END_BUILTINS;
8610 return DECL_FUNCTION_CODE (fndecl);
8613 if (! more_const_call_expr_args_p (&iter))
8614 return END_BUILTINS;
8616 arg = next_const_call_expr_arg (&iter);
8617 argtype = TREE_TYPE (arg);
8619 if (SCALAR_FLOAT_TYPE_P (parmtype))
8621 if (! SCALAR_FLOAT_TYPE_P (argtype))
8622 return END_BUILTINS;
8624 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8626 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8627 return END_BUILTINS;
8629 else if (POINTER_TYPE_P (parmtype))
8631 if (! POINTER_TYPE_P (argtype))
8632 return END_BUILTINS;
8634 else if (INTEGRAL_TYPE_P (parmtype))
8636 if (! INTEGRAL_TYPE_P (argtype))
8637 return END_BUILTINS;
8639 else
8640 return END_BUILTINS;
8643 /* Variable-length argument list. */
8644 return DECL_FUNCTION_CODE (fndecl);
8647 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8648 evaluate to a constant. */
8650 static tree
8651 fold_builtin_constant_p (tree arg)
8653 /* We return 1 for a numeric type that's known to be a constant
8654 value at compile-time or for an aggregate type that's a
8655 literal constant. */
8656 STRIP_NOPS (arg);
8658 /* If we know this is a constant, emit the constant of one. */
8659 if (CONSTANT_CLASS_P (arg)
8660 || (TREE_CODE (arg) == CONSTRUCTOR
8661 && TREE_CONSTANT (arg)))
8662 return integer_one_node;
8663 if (TREE_CODE (arg) == ADDR_EXPR)
8665 tree op = TREE_OPERAND (arg, 0);
8666 if (TREE_CODE (op) == STRING_CST
8667 || (TREE_CODE (op) == ARRAY_REF
8668 && integer_zerop (TREE_OPERAND (op, 1))
8669 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8670 return integer_one_node;
8673 /* If this expression has side effects, show we don't know it to be a
8674 constant. Likewise if it's a pointer or aggregate type since in
8675 those cases we only want literals, since those are only optimized
8676 when generating RTL, not later.
8677 And finally, if we are compiling an initializer, not code, we
8678 need to return a definite result now; there's not going to be any
8679 more optimization done. */
8680 if (TREE_SIDE_EFFECTS (arg)
8681 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8682 || POINTER_TYPE_P (TREE_TYPE (arg))
8683 || cfun == 0
8684 || folding_initializer
8685 || force_folding_builtin_constant_p)
8686 return integer_zero_node;
8688 return NULL_TREE;
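/* For example,

     __builtin_constant_p (3 * 7)   folds to 1 here (a constant after folding),
     __builtin_constant_p ("abc")   folds to 1 (address of a string literal),
     __builtin_constant_p (x)       for an ordinary variable returns NULL_TREE,
                                    deferring the decision to later folding,
                                    unless one of the "definite 0" conditions
                                    above applies.  */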
8691 /* Create builtin_expect or builtin_expect_with_probability
8692 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8693 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8694 argument; builtin_expect_with_probability instead uses the third argument
8695 as the PROBABILITY value. */
8697 static tree
8698 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8699 tree predictor, tree probability)
8701 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8703 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8704 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8705 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8706 ret_type = TREE_TYPE (TREE_TYPE (fn));
8707 pred_type = TREE_VALUE (arg_types);
8708 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8710 pred = fold_convert_loc (loc, pred_type, pred);
8711 expected = fold_convert_loc (loc, expected_type, expected);
8713 if (probability)
8714 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8715 else
8716 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8717 predictor);
8719 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8720 build_int_cst (ret_type, 0));
8723 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8724 NULL_TREE if no simplification is possible. */
8726 tree
8727 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8728 tree arg3)
8730 tree inner, fndecl, inner_arg0;
8731 enum tree_code code;
8733 /* Distribute the expected value over short-circuiting operators.
8734 See through the cast from truthvalue_type_node to long. */
8735 inner_arg0 = arg0;
8736 while (CONVERT_EXPR_P (inner_arg0)
8737 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8738 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8739 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8741 /* If this is a builtin_expect within a builtin_expect keep the
8742 inner one. See through a comparison against a constant. It
8743 might have been added to create a truthvalue. */
8744 inner = inner_arg0;
8746 if (COMPARISON_CLASS_P (inner)
8747 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8748 inner = TREE_OPERAND (inner, 0);
8750 if (TREE_CODE (inner) == CALL_EXPR
8751 && (fndecl = get_callee_fndecl (inner))
8752 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
8753 BUILT_IN_EXPECT_WITH_PROBABILITY))
8754 return arg0;
8756 inner = inner_arg0;
8757 code = TREE_CODE (inner);
8758 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8760 tree op0 = TREE_OPERAND (inner, 0);
8761 tree op1 = TREE_OPERAND (inner, 1);
8762 arg1 = save_expr (arg1);
8764 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8765 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8766 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8768 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8771 /* If the argument isn't invariant then there's nothing else we can do. */
8772 if (!TREE_CONSTANT (inner_arg0))
8773 return NULL_TREE;
8775 /* If we expect that a comparison against the argument will fold to
8776 a constant return the constant. In practice, this means a true
8777 constant or the address of a non-weak symbol. */
8778 inner = inner_arg0;
8779 STRIP_NOPS (inner);
8780 if (TREE_CODE (inner) == ADDR_EXPR)
8784 inner = TREE_OPERAND (inner, 0);
8786 while (TREE_CODE (inner) == COMPONENT_REF
8787 || TREE_CODE (inner) == ARRAY_REF);
8788 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8789 return NULL_TREE;
8792 /* Otherwise, ARG0 already has the proper type for the return value. */
8793 return arg0;
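/* For example, the short-circuit case above rewrites

     __builtin_expect (a > 0 && b > 0, 1)

   roughly into

     __builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1)

   so that each branch of the short-circuit carries the prediction.  */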
8796 /* Fold a call to __builtin_classify_type with argument ARG. */
8798 static tree
8799 fold_builtin_classify_type (tree arg)
8801 if (arg == 0)
8802 return build_int_cst (integer_type_node, no_type_class);
8804 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8807 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8808 ARG. */
8810 static tree
8811 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8813 if (!validate_arg (arg, POINTER_TYPE))
8814 return NULL_TREE;
8815 else
8817 c_strlen_data lendata = { };
8818 tree len = c_strlen (arg, 0, &lendata);
8820 if (len)
8821 return fold_convert_loc (loc, type, len);
8823 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8824 also early enough to detect invalid reads in multidimensional
8825 arrays and struct members. */
8826 if (!lendata.decl)
8827 c_strlen (arg, 1, &lendata);
8829 if (lendata.decl)
8831 if (EXPR_HAS_LOCATION (arg))
8832 loc = EXPR_LOCATION (arg);
8833 else if (loc == UNKNOWN_LOCATION)
8834 loc = input_location;
8835 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8838 return NULL_TREE;
8842 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8844 static tree
8845 fold_builtin_inf (location_t loc, tree type, int warn)
8847 /* __builtin_inff is intended to be usable to define INFINITY on all
8848 targets. If an infinity is not available, INFINITY expands "to a
8849 positive constant of type float that overflows at translation
8850 time", footnote "In this case, using INFINITY will violate the
8851 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8852 Thus we pedwarn to ensure this constraint violation is
8853 diagnosed. */
8854 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8855 pedwarn (loc, 0, "target format does not support infinity");
8857 return build_real (type, dconstinf);
8860 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8861 NULL_TREE if no simplification can be made. */
8863 static tree
8864 fold_builtin_sincos (location_t loc,
8865 tree arg0, tree arg1, tree arg2)
8867 tree type;
8868 tree fndecl, call = NULL_TREE;
8870 if (!validate_arg (arg0, REAL_TYPE)
8871 || !validate_arg (arg1, POINTER_TYPE)
8872 || !validate_arg (arg2, POINTER_TYPE))
8873 return NULL_TREE;
8875 type = TREE_TYPE (arg0);
8877 /* Calculate the result when the argument is a constant. */
8878 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8879 if (fn == END_BUILTINS)
8880 return NULL_TREE;
8882 /* Canonicalize sincos to cexpi. */
8883 if (TREE_CODE (arg0) == REAL_CST)
8885 tree complex_type = build_complex_type (type);
8886 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8888 if (!call)
8890 if (!targetm.libc_has_function (function_c99_math_complex, type)
8891 || !builtin_decl_implicit_p (fn))
8892 return NULL_TREE;
8893 fndecl = builtin_decl_explicit (fn);
8894 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8895 call = builtin_save_expr (call);
8898 tree ptype = build_pointer_type (type);
8899 arg1 = fold_convert (ptype, arg1);
8900 arg2 = fold_convert (ptype, arg2);
8901 return build2 (COMPOUND_EXPR, void_type_node,
8902 build2 (MODIFY_EXPR, void_type_node,
8903 build_fold_indirect_ref_loc (loc, arg1),
8904 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8905 build2 (MODIFY_EXPR, void_type_node,
8906 build_fold_indirect_ref_loc (loc, arg2),
8907 fold_build1_loc (loc, REALPART_EXPR, type, call)));
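/* Illustrative sketch of the canonicalization above, using hypothetical
   names x, sinp and cosp: for double arguments
     sincos (x, sinp, cosp);
   becomes roughly
     __complex__ double t = __builtin_cexpi (x);
     *sinp = __imag__ t, *cosp = __real__ t;
   so later passes only have to deal with cexpi.  */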
8910 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8911 Return NULL_TREE if no simplification can be made. */
8913 static tree
8914 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8916 if (!validate_arg (arg1, POINTER_TYPE)
8917 || !validate_arg (arg2, POINTER_TYPE)
8918 || !validate_arg (len, INTEGER_TYPE))
8919 return NULL_TREE;
8921 /* If the LEN parameter is zero, return zero. */
8922 if (integer_zerop (len))
8923 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8924 arg1, arg2);
8926 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8927 if (operand_equal_p (arg1, arg2, 0))
8928 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8930 /* If the LEN parameter is one, return an expression corresponding to
8931 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8932 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8934 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8935 tree cst_uchar_ptr_node
8936 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8938 tree ind1
8939 = fold_convert_loc (loc, integer_type_node,
8940 build1 (INDIRECT_REF, cst_uchar_node,
8941 fold_convert_loc (loc,
8942 cst_uchar_ptr_node,
8943 arg1)));
8944 tree ind2
8945 = fold_convert_loc (loc, integer_type_node,
8946 build1 (INDIRECT_REF, cst_uchar_node,
8947 fold_convert_loc (loc,
8948 cst_uchar_ptr_node,
8949 arg2)));
8950 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8953 return NULL_TREE;
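/* Illustrative sketch of the LEN == 1 case above, with hypothetical
   pointers p and q:
     memcmp (p, q, 1)
   folds to roughly
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q
   which avoids the library call entirely.  */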
8956 /* Fold a call to builtin isascii with argument ARG. */
8958 static tree
8959 fold_builtin_isascii (location_t loc, tree arg)
8961 if (!validate_arg (arg, INTEGER_TYPE))
8962 return NULL_TREE;
8963 else
8965 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8966 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8967 build_int_cst (integer_type_node,
8968 ~ (unsigned HOST_WIDE_INT) 0x7f));
8969 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8970 arg, integer_zero_node);
8974 /* Fold a call to builtin toascii with argument ARG. */
8976 static tree
8977 fold_builtin_toascii (location_t loc, tree arg)
8979 if (!validate_arg (arg, INTEGER_TYPE))
8980 return NULL_TREE;
8982 /* Transform toascii(c) -> (c & 0x7f). */
8983 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8984 build_int_cst (integer_type_node, 0x7f));
8987 /* Fold a call to builtin isdigit with argument ARG. */
8989 static tree
8990 fold_builtin_isdigit (location_t loc, tree arg)
8992 if (!validate_arg (arg, INTEGER_TYPE))
8993 return NULL_TREE;
8994 else
8996 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8997 /* According to the C standard, isdigit is unaffected by locale.
8998 However, it definitely is affected by the target character set. */
8999 unsigned HOST_WIDE_INT target_digit0
9000 = lang_hooks.to_target_charset ('0');
9002 if (target_digit0 == 0)
9003 return NULL_TREE;
9005 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9006 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9007 build_int_cst (unsigned_type_node, target_digit0));
9008 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9009 build_int_cst (unsigned_type_node, 9));
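/* Illustrative sketch, assuming an execution character set where '0' is
   48 (as in ASCII): the fold above turns
     isdigit (c)
   into
     (unsigned) c - 48 <= 9
   i.e. a single unsigned comparison instead of a ctype lookup.  */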
9013 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9015 static tree
9016 fold_builtin_fabs (location_t loc, tree arg, tree type)
9018 if (!validate_arg (arg, REAL_TYPE))
9019 return NULL_TREE;
9021 arg = fold_convert_loc (loc, type, arg);
9022 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9025 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9027 static tree
9028 fold_builtin_abs (location_t loc, tree arg, tree type)
9030 if (!validate_arg (arg, INTEGER_TYPE))
9031 return NULL_TREE;
9033 arg = fold_convert_loc (loc, type, arg);
9034 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9037 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9039 static tree
9040 fold_builtin_carg (location_t loc, tree arg, tree type)
9042 if (validate_arg (arg, COMPLEX_TYPE)
9043 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9045 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9047 if (atan2_fn)
9049 tree new_arg = builtin_save_expr (arg);
9050 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9051 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9052 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9056 return NULL_TREE;
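/* Illustrative sketch, with a hypothetical double _Complex value z:
     carg (z)
   folds to
     atan2 (__imag__ z, __real__ z)
   where z is wrapped in a SAVE_EXPR so it is evaluated only once.  */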
9059 /* Fold a call to builtin frexp; we can assume the base is 2. */
9061 static tree
9062 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9064 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9065 return NULL_TREE;
9067 STRIP_NOPS (arg0);
9069 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9070 return NULL_TREE;
9072 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9074 /* Proceed if a valid pointer type was passed in. */
9075 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9077 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9078 tree frac, exp, res;
9080 switch (value->cl)
9082 case rvc_zero:
9083 /* For +-0, return (*exp = 0, +-0). */
9084 exp = integer_zero_node;
9085 frac = arg0;
9086 break;
9087 case rvc_nan:
9088 case rvc_inf:
9089 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9090 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9091 case rvc_normal:
9093 /* Since the frexp function always expects base 2, and in
9094 GCC normalized significands are already in the range
9095 [0.5, 1.0), we have exactly what frexp wants. */
9096 REAL_VALUE_TYPE frac_rvt = *value;
9097 SET_REAL_EXP (&frac_rvt, 0);
9098 frac = build_real (rettype, frac_rvt);
9099 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9101 break;
9102 default:
9103 gcc_unreachable ();
9106 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9107 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9108 TREE_SIDE_EFFECTS (arg1) = 1;
9109 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9110 suppress_warning (res, OPT_Wunused_value);
9111 return res;
9114 return NULL_TREE;
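/* Illustrative sketch of the constant folding above, assuming IEEE
   double and a hypothetical int variable e:
     frexp (12.0, &e)
   folds to the equivalent of
     (*&e = 4, 0.75)
   since 12.0 == 0.75 * 2**4 and the significand 0.75 is already in
   [0.5, 1.0).  */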
9117 /* Fold a call to builtin modf. */
9119 static tree
9120 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9122 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9123 return NULL_TREE;
9125 STRIP_NOPS (arg0);
9127 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9128 return NULL_TREE;
9130 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9132 /* Proceed if a valid pointer type was passed in. */
9133 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9135 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9136 REAL_VALUE_TYPE trunc, frac;
9137 tree res;
9139 switch (value->cl)
9141 case rvc_nan:
9142 case rvc_zero:
9143 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9144 trunc = frac = *value;
9145 break;
9146 case rvc_inf:
9147 /* For +-Inf, return (*arg1 = arg0, +-0). */
9148 frac = dconst0;
9149 frac.sign = value->sign;
9150 trunc = *value;
9151 break;
9152 case rvc_normal:
9153 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9154 real_trunc (&trunc, VOIDmode, value);
9155 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9156 /* If the original number was negative and already
9157 integral, then the fractional part is -0.0. */
9158 if (value->sign && frac.cl == rvc_zero)
9159 frac.sign = value->sign;
9160 break;
9163 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9164 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9165 build_real (rettype, trunc));
9166 TREE_SIDE_EFFECTS (arg1) = 1;
9167 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9168 build_real (rettype, frac));
9169 suppress_warning (res, OPT_Wunused_value);
9170 return res;
9173 return NULL_TREE;
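/* Illustrative sketch of the constant folding above, with a hypothetical
   double variable ip:
     modf (-3.25, &ip)
   folds to the equivalent of
     (*&ip = -3.0, -0.25)
   and for a negative argument that is already integral the fractional
   part comes out as -0.0, per the rvc_normal case above.  */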
9176 /* Given a location LOC, an interclass builtin function decl FNDECL
9177 and its single argument ARG, return a folded expression computing
9178 the same, or NULL_TREE if we either couldn't or didn't want to fold
9179 (the latter happens if there's an RTL instruction available). */
9181 static tree
9182 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9184 machine_mode mode;
9186 if (!validate_arg (arg, REAL_TYPE))
9187 return NULL_TREE;
9189 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9190 return NULL_TREE;
9192 mode = TYPE_MODE (TREE_TYPE (arg));
9194 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9196 /* If there is no optab, try generic code. */
9197 switch (DECL_FUNCTION_CODE (fndecl))
9199 tree result;
9201 CASE_FLT_FN (BUILT_IN_ISINF):
9203 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9204 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9205 tree type = TREE_TYPE (arg);
9206 REAL_VALUE_TYPE r;
9207 char buf[128];
9209 if (is_ibm_extended)
9211 /* NaN and Inf are encoded in the high-order double value
9212 only. The low-order value is not significant. */
9213 type = double_type_node;
9214 mode = DFmode;
9215 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9217 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9218 real_from_string (&r, buf);
9219 result = build_call_expr (isgr_fn, 2,
9220 fold_build1_loc (loc, ABS_EXPR, type, arg),
9221 build_real (type, r));
9222 return result;
9224 CASE_FLT_FN (BUILT_IN_FINITE):
9225 case BUILT_IN_ISFINITE:
9227 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9228 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9229 tree type = TREE_TYPE (arg);
9230 REAL_VALUE_TYPE r;
9231 char buf[128];
9233 if (is_ibm_extended)
9235 /* NaN and Inf are encoded in the high-order double value
9236 only. The low-order value is not significant. */
9237 type = double_type_node;
9238 mode = DFmode;
9239 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9241 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9242 real_from_string (&r, buf);
9243 result = build_call_expr (isle_fn, 2,
9244 fold_build1_loc (loc, ABS_EXPR, type, arg),
9245 build_real (type, r));
9246 /*result = fold_build2_loc (loc, UNGT_EXPR,
9247 TREE_TYPE (TREE_TYPE (fndecl)),
9248 fold_build1_loc (loc, ABS_EXPR, type, arg),
9249 build_real (type, r));
9250 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9251 TREE_TYPE (TREE_TYPE (fndecl)),
9252 result);*/
9253 return result;
9255 case BUILT_IN_ISNORMAL:
9257 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9258 islessequal(fabs(x),DBL_MAX). */
9259 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9260 tree type = TREE_TYPE (arg);
9261 tree orig_arg, max_exp, min_exp;
9262 machine_mode orig_mode = mode;
9263 REAL_VALUE_TYPE rmax, rmin;
9264 char buf[128];
9266 orig_arg = arg = builtin_save_expr (arg);
9267 if (is_ibm_extended)
9269 /* Use double to test the normal range of IBM extended
9270 precision. Emin for IBM extended precision is
9271 different to emin for IEEE double, being 53 higher
9272 since the low double exponent is at least 53 lower
9273 than the high double exponent. */
9274 type = double_type_node;
9275 mode = DFmode;
9276 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9278 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9280 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9281 real_from_string (&rmax, buf);
9282 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9283 real_from_string (&rmin, buf);
9284 max_exp = build_real (type, rmax);
9285 min_exp = build_real (type, rmin);
9287 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9288 if (is_ibm_extended)
9290 /* Testing the high end of the range is done just using
9291 the high double, using the same test as isfinite().
9292 For the subnormal end of the range we first test the
9293 high double, then if its magnitude is equal to the
9294 limit of 0x1p-969, we test whether the low double is
9295 non-zero and opposite sign to the high double. */
9296 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9297 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9298 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9299 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9300 arg, min_exp);
9301 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9302 complex_double_type_node, orig_arg);
9303 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9304 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9305 tree zero = build_real (type, dconst0);
9306 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9307 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9308 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9309 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9310 fold_build3 (COND_EXPR,
9311 integer_type_node,
9312 hilt, logt, lolt));
9313 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9314 eq_min, ok_lo);
9315 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9316 gt_min, eq_min);
9318 else
9320 tree const isge_fn
9321 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9322 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9324 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9325 max_exp, min_exp);
9326 return result;
9328 default:
9329 break;
9332 return NULL_TREE;
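/* Illustrative sketch of the generic expansions above for plain double
   (DBL_MAX / DBL_MIN stand for the largest finite and smallest normal
   values of the argument's mode):
     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)
   with extra care taken for IBM extended precision as noted above.  */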
9335 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9336 ARG is the argument for the call. */
9338 static tree
9339 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9341 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9343 if (!validate_arg (arg, REAL_TYPE))
9344 return NULL_TREE;
9346 switch (builtin_index)
9348 case BUILT_IN_ISINF:
9349 if (tree_expr_infinite_p (arg))
9350 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9351 if (!tree_expr_maybe_infinite_p (arg))
9352 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9353 return NULL_TREE;
9355 case BUILT_IN_ISINF_SIGN:
9357 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9358 /* In a boolean context, GCC will fold the inner COND_EXPR to
9359 1. So e.g. "if (isinf_sign(x))" would be folded to just
9360 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9361 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9362 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9363 tree tmp = NULL_TREE;
9365 arg = builtin_save_expr (arg);
9367 if (signbit_fn && isinf_fn)
9369 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9370 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9372 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9373 signbit_call, integer_zero_node);
9374 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9375 isinf_call, integer_zero_node);
9377 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9378 integer_minus_one_node, integer_one_node);
9379 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9380 isinf_call, tmp,
9381 integer_zero_node);
9384 return tmp;
9387 case BUILT_IN_ISFINITE:
9388 if (tree_expr_finite_p (arg))
9389 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9390 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9391 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9392 return NULL_TREE;
9394 case BUILT_IN_ISNAN:
9395 if (tree_expr_nan_p (arg))
9396 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9397 if (!tree_expr_maybe_nan_p (arg))
9398 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9401 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9402 if (is_ibm_extended)
9404 /* NaN and Inf are encoded in the high-order double value
9405 only. The low-order value is not significant. */
9406 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9409 arg = builtin_save_expr (arg);
9410 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9412 case BUILT_IN_ISSIGNALING:
9413 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9414 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9415 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9416 here, so there is some possibility of __builtin_issignaling working
9417 without -fsignaling-nans. Especially when -fno-signaling-nans is
9418 the default. */
9419 if (!tree_expr_maybe_nan_p (arg))
9420 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9421 return NULL_TREE;
9423 default:
9424 gcc_unreachable ();
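/* Illustrative sketch of two of the cases above:
     isinf_sign (x)  ->  isinf (x) ? (signbit (x) ? -1 : 1) : 0
   and, when the argument may be a NaN, the isnan fallback is the
   self-comparison
     isnan (x)       ->  x UNORDERED x
   which is true exactly when x is a NaN.  */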
9428 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9429 This builtin will generate code to return the appropriate floating
9430 point classification depending on the value of the floating point
9431 number passed in. The possible return values must be supplied as
9432 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9433 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
9434 one floating-point argument, which is "type generic". */
9436 static tree
9437 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9439 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9440 arg, type, res, tmp;
9441 machine_mode mode;
9442 REAL_VALUE_TYPE r;
9443 char buf[128];
9445 /* Verify the required arguments in the original call. */
9446 if (nargs != 6
9447 || !validate_arg (args[0], INTEGER_TYPE)
9448 || !validate_arg (args[1], INTEGER_TYPE)
9449 || !validate_arg (args[2], INTEGER_TYPE)
9450 || !validate_arg (args[3], INTEGER_TYPE)
9451 || !validate_arg (args[4], INTEGER_TYPE)
9452 || !validate_arg (args[5], REAL_TYPE))
9453 return NULL_TREE;
9455 fp_nan = args[0];
9456 fp_infinite = args[1];
9457 fp_normal = args[2];
9458 fp_subnormal = args[3];
9459 fp_zero = args[4];
9460 arg = args[5];
9461 type = TREE_TYPE (arg);
9462 mode = TYPE_MODE (type);
9463 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9465 /* fpclassify(x) ->
9466 isnan(x) ? FP_NAN :
9467 (fabs(x) == Inf ? FP_INFINITE :
9468 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9469 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9471 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9472 build_real (type, dconst0));
9473 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9474 tmp, fp_zero, fp_subnormal);
9476 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9477 real_from_string (&r, buf);
9478 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9479 arg, build_real (type, r));
9480 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9482 if (tree_expr_maybe_infinite_p (arg))
9484 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9485 build_real (type, dconstinf));
9486 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9487 fp_infinite, res);
9490 if (tree_expr_maybe_nan_p (arg))
9492 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9493 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9496 return res;
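/* Illustrative sketch of the nest built above, reading from the outermost
   test inwards (x stands for the saved fabs of the argument, DBL_MIN for
   the smallest normal value of its mode):
     fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL, FP_ZERO, x)
   becomes roughly
     x UNORDERED x  ? FP_NAN
     : x == Inf     ? FP_INFINITE
     : x >= DBL_MIN ? FP_NORMAL
     : x == 0       ? FP_ZERO
     :                FP_SUBNORMAL
   with the NaN and infinity tests dropped when the argument provably
   cannot be a NaN or an infinity.  */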
9499 /* Fold a call to an unordered comparison function such as
9500 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9501 being called and ARG0 and ARG1 are the arguments for the call.
9502 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9503 the opposite of the desired result. UNORDERED_CODE is used
9504 for modes that can hold NaNs and ORDERED_CODE is used for
9505 the rest. */
9507 static tree
9508 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9509 enum tree_code unordered_code,
9510 enum tree_code ordered_code)
9512 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9513 enum tree_code code;
9514 tree type0, type1;
9515 enum tree_code code0, code1;
9516 tree cmp_type = NULL_TREE;
9518 type0 = TREE_TYPE (arg0);
9519 type1 = TREE_TYPE (arg1);
9521 code0 = TREE_CODE (type0);
9522 code1 = TREE_CODE (type1);
9524 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9525 /* Choose the wider of two real types. */
9526 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9527 ? type0 : type1;
9528 else if (code0 == REAL_TYPE
9529 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
9530 cmp_type = type0;
9531 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
9532 && code1 == REAL_TYPE)
9533 cmp_type = type1;
9535 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9536 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9538 if (unordered_code == UNORDERED_EXPR)
9540 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9541 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9542 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9543 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9544 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9547 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9548 ? unordered_code : ordered_code;
9549 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9550 fold_build2_loc (loc, code, type, arg0, arg1));
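/* Illustrative sketch: each macro is folded through the inverse of its
   "opposite" comparison, e.g. when a NaN operand is possible
     isgreater (x, y)  ->  !(x UNLE y)
   where the quiet UNLE comparison, unlike a raw x > y, does not raise
   the invalid exception for quiet NaN operands.  */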
9553 /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
9554 After choosing the wider floating-point type for the comparison,
9555 the code is folded to:
9556 SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
9558 static tree
9559 fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
9561 tree type0, type1;
9562 enum tree_code code0, code1;
9563 tree cmp1, cmp2, cmp_type = NULL_TREE;
9565 type0 = TREE_TYPE (arg0);
9566 type1 = TREE_TYPE (arg1);
9568 code0 = TREE_CODE (type0);
9569 code1 = TREE_CODE (type1);
9571 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9572 /* Choose the wider of two real types. */
9573 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9574 ? type0 : type1;
9575 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9576 cmp_type = type0;
9577 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9578 cmp_type = type1;
9580 arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
9581 arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
9583 cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
9584 cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
9586 return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
9589 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9590 arithmetic if it can never overflow, or into internal functions that
9591 return both the result of the arithmetic and an overflow boolean flag in
9592 a complex integer result, or into some other check for overflow.
9593 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9594 checking part of that. */
9596 static tree
9597 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9598 tree arg0, tree arg1, tree arg2)
9600 enum internal_fn ifn = IFN_LAST;
9601 /* The code of the expression corresponding to the built-in. */
9602 enum tree_code opcode = ERROR_MARK;
9603 bool ovf_only = false;
9605 switch (fcode)
9607 case BUILT_IN_ADD_OVERFLOW_P:
9608 ovf_only = true;
9609 /* FALLTHRU */
9610 case BUILT_IN_ADD_OVERFLOW:
9611 case BUILT_IN_SADD_OVERFLOW:
9612 case BUILT_IN_SADDL_OVERFLOW:
9613 case BUILT_IN_SADDLL_OVERFLOW:
9614 case BUILT_IN_UADD_OVERFLOW:
9615 case BUILT_IN_UADDL_OVERFLOW:
9616 case BUILT_IN_UADDLL_OVERFLOW:
9617 opcode = PLUS_EXPR;
9618 ifn = IFN_ADD_OVERFLOW;
9619 break;
9620 case BUILT_IN_SUB_OVERFLOW_P:
9621 ovf_only = true;
9622 /* FALLTHRU */
9623 case BUILT_IN_SUB_OVERFLOW:
9624 case BUILT_IN_SSUB_OVERFLOW:
9625 case BUILT_IN_SSUBL_OVERFLOW:
9626 case BUILT_IN_SSUBLL_OVERFLOW:
9627 case BUILT_IN_USUB_OVERFLOW:
9628 case BUILT_IN_USUBL_OVERFLOW:
9629 case BUILT_IN_USUBLL_OVERFLOW:
9630 opcode = MINUS_EXPR;
9631 ifn = IFN_SUB_OVERFLOW;
9632 break;
9633 case BUILT_IN_MUL_OVERFLOW_P:
9634 ovf_only = true;
9635 /* FALLTHRU */
9636 case BUILT_IN_MUL_OVERFLOW:
9637 case BUILT_IN_SMUL_OVERFLOW:
9638 case BUILT_IN_SMULL_OVERFLOW:
9639 case BUILT_IN_SMULLL_OVERFLOW:
9640 case BUILT_IN_UMUL_OVERFLOW:
9641 case BUILT_IN_UMULL_OVERFLOW:
9642 case BUILT_IN_UMULLL_OVERFLOW:
9643 opcode = MULT_EXPR;
9644 ifn = IFN_MUL_OVERFLOW;
9645 break;
9646 default:
9647 gcc_unreachable ();
9650 /* For the "generic" overloads, the first two arguments can have different
9651 types and the last argument determines the target type to use to check
9652 for overflow. The arguments of the other overloads all have the same
9653 type. */
9654 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9656 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9657 arguments are constant, attempt to fold the built-in call into a constant
9658 expression indicating whether or not it detected an overflow. */
9659 if (ovf_only
9660 && TREE_CODE (arg0) == INTEGER_CST
9661 && TREE_CODE (arg1) == INTEGER_CST)
9662 /* Perform the computation in the target type and check for overflow. */
9663 return omit_one_operand_loc (loc, boolean_type_node,
9664 arith_overflowed_p (opcode, type, arg0, arg1)
9665 ? boolean_true_node : boolean_false_node,
9666 arg2);
9668 tree intres, ovfres;
9669 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9671 intres = fold_binary_loc (loc, opcode, type,
9672 fold_convert_loc (loc, type, arg0),
9673 fold_convert_loc (loc, type, arg1));
9674 if (TREE_OVERFLOW (intres))
9675 intres = drop_tree_overflow (intres);
9676 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9677 ? boolean_true_node : boolean_false_node);
9679 else
9681 tree ctype = build_complex_type (type);
9682 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9683 arg0, arg1);
9684 tree tgt = save_expr (call);
9685 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9686 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9687 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9690 if (ovf_only)
9691 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9693 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9694 tree store
9695 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9696 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
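/* Illustrative sketch of the non-constant case above, for hypothetical
   operands a, b and result pointer res:
     __builtin_add_overflow (a, b, res)
   folds to roughly
     c = .ADD_OVERFLOW (a, b);   (c is a complex integer pair)
     *res = REALPART_EXPR <c>, (bool) IMAGPART_EXPR <c>
   so the sum is stored through *res and the overflow flag becomes the
   value of the call.  */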
9699 /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
9700 internal function. */
9702 static tree
9703 fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
9704 tree arg0, tree arg1)
9706 enum internal_fn ifn;
9707 enum built_in_function fcodei, fcodel, fcodell;
9708 tree arg0_type = TREE_TYPE (arg0);
9709 tree cast_type = NULL_TREE;
9710 int addend = 0;
9712 switch (fcode)
9714 case BUILT_IN_CLZG:
9715 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
9716 return NULL_TREE;
9717 ifn = IFN_CLZ;
9718 fcodei = BUILT_IN_CLZ;
9719 fcodel = BUILT_IN_CLZL;
9720 fcodell = BUILT_IN_CLZLL;
9721 break;
9722 case BUILT_IN_CTZG:
9723 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
9724 return NULL_TREE;
9725 ifn = IFN_CTZ;
9726 fcodei = BUILT_IN_CTZ;
9727 fcodel = BUILT_IN_CTZL;
9728 fcodell = BUILT_IN_CTZLL;
9729 break;
9730 case BUILT_IN_CLRSBG:
9731 ifn = IFN_CLRSB;
9732 fcodei = BUILT_IN_CLRSB;
9733 fcodel = BUILT_IN_CLRSBL;
9734 fcodell = BUILT_IN_CLRSBLL;
9735 break;
9736 case BUILT_IN_FFSG:
9737 ifn = IFN_FFS;
9738 fcodei = BUILT_IN_FFS;
9739 fcodel = BUILT_IN_FFSL;
9740 fcodell = BUILT_IN_FFSLL;
9741 break;
9742 case BUILT_IN_PARITYG:
9743 ifn = IFN_PARITY;
9744 fcodei = BUILT_IN_PARITY;
9745 fcodel = BUILT_IN_PARITYL;
9746 fcodell = BUILT_IN_PARITYLL;
9747 break;
9748 case BUILT_IN_POPCOUNTG:
9749 ifn = IFN_POPCOUNT;
9750 fcodei = BUILT_IN_POPCOUNT;
9751 fcodel = BUILT_IN_POPCOUNTL;
9752 fcodell = BUILT_IN_POPCOUNTLL;
9753 break;
9754 default:
9755 gcc_unreachable ();
9758 if (TYPE_PRECISION (arg0_type)
9759 <= TYPE_PRECISION (long_long_unsigned_type_node))
9761 if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
9763 cast_type = (TYPE_UNSIGNED (arg0_type)
9764 ? unsigned_type_node : integer_type_node);
9765 else if (TYPE_PRECISION (arg0_type)
9766 <= TYPE_PRECISION (long_unsigned_type_node))
9768 cast_type = (TYPE_UNSIGNED (arg0_type)
9769 ? long_unsigned_type_node : long_integer_type_node);
9770 fcodei = fcodel;
9772 else
9774 cast_type = (TYPE_UNSIGNED (arg0_type)
9775 ? long_long_unsigned_type_node
9776 : long_long_integer_type_node);
9777 fcodei = fcodell;
9780 else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
9782 cast_type
9783 = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
9784 TYPE_UNSIGNED (arg0_type));
9785 gcc_assert (TYPE_PRECISION (cast_type)
9786 == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
9787 fcodei = END_BUILTINS;
9789 else
9790 fcodei = END_BUILTINS;
9791 if (cast_type)
9793 switch (fcode)
9795 case BUILT_IN_CLZG:
9796 case BUILT_IN_CLRSBG:
9797 addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
9798 break;
9799 default:
9800 break;
9802 arg0 = fold_convert (cast_type, arg0);
9803 arg0_type = cast_type;
9806 if (arg1)
9807 arg1 = fold_convert (integer_type_node, arg1);
9809 tree arg2 = arg1;
9810 if (fcode == BUILT_IN_CLZG && addend)
9812 if (arg1)
9813 arg0 = save_expr (arg0);
9814 arg2 = NULL_TREE;
9816 tree call = NULL_TREE, tem;
9817 if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
9818 && (TYPE_PRECISION (arg0_type)
9819 == 2 * TYPE_PRECISION (long_long_unsigned_type_node)))
9821 /* __int128 expansions using up to 2 long long builtins. */
9822 arg0 = save_expr (arg0);
9823 tree type = (TYPE_UNSIGNED (arg0_type)
9824 ? long_long_unsigned_type_node
9825 : long_long_integer_type_node);
9826 tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
9827 build_int_cst (integer_type_node,
9828 MAX_FIXED_MODE_SIZE / 2));
9829 hi = fold_convert (type, hi);
9830 tree lo = fold_convert (type, arg0);
9831 switch (fcode)
9833 case BUILT_IN_CLZG:
9834 call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
9835 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9836 build_int_cst (integer_type_node,
9837 MAX_FIXED_MODE_SIZE / 2));
9838 if (arg2)
9839 call = fold_build3 (COND_EXPR, integer_type_node,
9840 fold_build2 (NE_EXPR, boolean_type_node,
9841 lo, build_zero_cst (type)),
9842 call, arg2);
9843 call = fold_build3 (COND_EXPR, integer_type_node,
9844 fold_build2 (NE_EXPR, boolean_type_node,
9845 hi, build_zero_cst (type)),
9846 fold_builtin_bit_query (loc, fcode, hi,
9847 NULL_TREE),
9848 call);
9849 break;
9850 case BUILT_IN_CTZG:
9851 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
9852 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9853 build_int_cst (integer_type_node,
9854 MAX_FIXED_MODE_SIZE / 2));
9855 if (arg2)
9856 call = fold_build3 (COND_EXPR, integer_type_node,
9857 fold_build2 (NE_EXPR, boolean_type_node,
9858 hi, build_zero_cst (type)),
9859 call, arg2);
9860 call = fold_build3 (COND_EXPR, integer_type_node,
9861 fold_build2 (NE_EXPR, boolean_type_node,
9862 lo, build_zero_cst (type)),
9863 fold_builtin_bit_query (loc, fcode, lo,
9864 NULL_TREE),
9865 call);
9866 break;
9867 case BUILT_IN_CLRSBG:
9868 tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
9869 tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
9870 build_int_cst (integer_type_node,
9871 MAX_FIXED_MODE_SIZE / 2));
9872 tem = fold_build3 (COND_EXPR, integer_type_node,
9873 fold_build2 (LT_EXPR, boolean_type_node,
9874 fold_build2 (BIT_XOR_EXPR, type,
9875 lo, hi),
9876 build_zero_cst (type)),
9877 build_int_cst (integer_type_node,
9878 MAX_FIXED_MODE_SIZE / 2 - 1),
9879 tem);
9880 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
9881 call = save_expr (call);
9882 call = fold_build3 (COND_EXPR, integer_type_node,
9883 fold_build2 (NE_EXPR, boolean_type_node,
9884 call,
9885 build_int_cst (integer_type_node,
9886 MAX_FIXED_MODE_SIZE
9887 / 2 - 1)),
9888 call, tem);
9889 break;
9890 case BUILT_IN_FFSG:
9891 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
9892 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9893 build_int_cst (integer_type_node,
9894 MAX_FIXED_MODE_SIZE / 2));
9895 call = fold_build3 (COND_EXPR, integer_type_node,
9896 fold_build2 (NE_EXPR, boolean_type_node,
9897 hi, build_zero_cst (type)),
9898 call, integer_zero_node);
9899 call = fold_build3 (COND_EXPR, integer_type_node,
9900 fold_build2 (NE_EXPR, boolean_type_node,
9901 lo, build_zero_cst (type)),
9902 fold_builtin_bit_query (loc, fcode, lo,
9903 NULL_TREE),
9904 call);
9905 break;
9906 case BUILT_IN_PARITYG:
9907 call = fold_builtin_bit_query (loc, fcode,
9908 fold_build2 (BIT_XOR_EXPR, type,
9909 lo, hi), NULL_TREE);
9910 break;
9911 case BUILT_IN_POPCOUNTG:
9912 call = fold_build2 (PLUS_EXPR, integer_type_node,
9913 fold_builtin_bit_query (loc, fcode, hi,
9914 NULL_TREE),
9915 fold_builtin_bit_query (loc, fcode, lo,
9916 NULL_TREE));
9917 break;
9918 default:
9919 gcc_unreachable ();
9922 else
9924 /* Only keep the second argument to IFN_CLZ/IFN_CTZ if it is the
9925 value defined at zero during GIMPLE, or for large/huge _BitInt
9926 (which are then lowered during bitint lowering). */
9927 if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
9929 int val;
9930 if (fcode == BUILT_IN_CLZG)
9932 if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
9933 val) != 2
9934 || wi::to_widest (arg2) != val)
9935 arg2 = NULL_TREE;
9937 else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
9938 val) != 2
9939 || wi::to_widest (arg2) != val)
9940 arg2 = NULL_TREE;
9941 if (!direct_internal_fn_supported_p (ifn, arg0_type,
9942 OPTIMIZE_FOR_BOTH))
9943 arg2 = NULL_TREE;
9944 if (arg2 == NULL_TREE)
9945 arg0 = save_expr (arg0);
9947 if (fcodei == END_BUILTINS || arg2)
9948 call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
9949 arg2 ? 2 : 1, arg0, arg2);
9950 else
9951 call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
9952 arg0);
9954 if (addend)
9955 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9956 build_int_cst (integer_type_node, addend));
9957 if (arg1 && arg2 == NULL_TREE)
9958 call = fold_build3 (COND_EXPR, integer_type_node,
9959 fold_build2 (NE_EXPR, boolean_type_node,
9960 arg0, build_zero_cst (arg0_type)),
9961 call, arg1);
9963 return call;
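/* Illustrative sketch of the double-wide expansion above, assuming
   MAX_FIXED_MODE_SIZE == 128 and an unsigned __int128 argument x:
     __builtin_popcountg (x)
   folds to roughly
     __builtin_popcountll ((unsigned long long) (x >> 64))
     + __builtin_popcountll ((unsigned long long) x)
   while clzg/ctzg count in the high or low half depending on which half
   is nonzero, adding 64 when the whole leading/trailing half is zero.  */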
9966 /* Fold __builtin_{add,sub}c{,l,ll} into a pair of internal functions
9967 that return both the result of the arithmetic and an overflow boolean
9968 flag in a complex integer result. */
9970 static tree
9971 fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
9972 tree *args)
9974 enum internal_fn ifn;
9976 switch (fcode)
9978 case BUILT_IN_ADDC:
9979 case BUILT_IN_ADDCL:
9980 case BUILT_IN_ADDCLL:
9981 ifn = IFN_ADD_OVERFLOW;
9982 break;
9983 case BUILT_IN_SUBC:
9984 case BUILT_IN_SUBCL:
9985 case BUILT_IN_SUBCLL:
9986 ifn = IFN_SUB_OVERFLOW;
9987 break;
9988 default:
9989 gcc_unreachable ();
9992 tree type = TREE_TYPE (args[0]);
9993 tree ctype = build_complex_type (type);
9994 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9995 args[0], args[1]);
9996 tree tgt = save_expr (call);
9997 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9998 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9999 call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10000 intres, args[2]);
10001 tgt = save_expr (call);
10002 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10003 tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10004 ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
10005 tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10006 tree store
10007 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10008 return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
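/* Illustrative sketch, using hypothetical operands a, b, carry_in and a
   pointer carry_out:
     __builtin_addc (a, b, carry_in, carry_out)
   folds into two .ADD_OVERFLOW calls,
     t1 = a + b           (overflow flag o1)
     t2 = t1 + carry_in   (overflow flag o2)
   with *carry_out = o1 | o2 and t2 as the value of the call.  */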
10011 /* Fold a call to __builtin_FILE to a constant string. */
10013 static inline tree
10014 fold_builtin_FILE (location_t loc)
10016 if (const char *fname = LOCATION_FILE (loc))
10018 /* The documentation says this builtin is equivalent to the preprocessor
10019 __FILE__ macro, so it appears appropriate to use the same file prefix
10020 mappings. */
10021 fname = remap_macro_filename (fname);
10022 return build_string_literal (fname);
10025 return build_string_literal ("");
10028 /* Fold a call to __builtin_FUNCTION to a constant string. */
10030 static inline tree
10031 fold_builtin_FUNCTION ()
10033 const char *name = "";
10035 if (current_function_decl)
10036 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10038 return build_string_literal (name);
10041 /* Fold a call to __builtin_LINE to an integer constant. */
10043 static inline tree
10044 fold_builtin_LINE (location_t loc, tree type)
10046 return build_int_cst (type, LOCATION_LINE (loc));
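/* Illustrative sketch, with hypothetical names: for a call located on
   line 42 of function foo in file "t.c", the three folders above yield
     __builtin_FILE ()      ->  "t.c"  (after any file prefix-map
                                        remapping)
     __builtin_FUNCTION ()  ->  "foo"
     __builtin_LINE ()      ->  42  */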
10049 /* Fold a call to built-in function FNDECL with 0 arguments.
10050 This function returns NULL_TREE if no simplification was possible. */
10052 static tree
10053 fold_builtin_0 (location_t loc, tree fndecl)
10055 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10056 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10057 switch (fcode)
10059 case BUILT_IN_FILE:
10060 return fold_builtin_FILE (loc);
10062 case BUILT_IN_FUNCTION:
10063 return fold_builtin_FUNCTION ();
10065 case BUILT_IN_LINE:
10066 return fold_builtin_LINE (loc, type);
10068 CASE_FLT_FN (BUILT_IN_INF):
10069 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10070 case BUILT_IN_INFD32:
10071 case BUILT_IN_INFD64:
10072 case BUILT_IN_INFD128:
10073 return fold_builtin_inf (loc, type, true);
10075 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10076 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10077 return fold_builtin_inf (loc, type, false);
10079 case BUILT_IN_CLASSIFY_TYPE:
10080 return fold_builtin_classify_type (NULL_TREE);
10082 case BUILT_IN_UNREACHABLE:
10083 /* Rewrite any explicit calls to __builtin_unreachable. */
10084 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
10085 return build_builtin_unreachable (loc);
10086 break;
10088 default:
10089 break;
10091 return NULL_TREE;
10094 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10095 This function returns NULL_TREE if no simplification was possible. */
10097 static tree
10098 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10100 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10103 if (TREE_CODE (arg0) == ERROR_MARK)
10104 return NULL_TREE;
10106 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10107 return ret;
10109 switch (fcode)
10111 case BUILT_IN_CONSTANT_P:
10113 tree val = fold_builtin_constant_p (arg0);
10115 /* Gimplification will pull the CALL_EXPR for the builtin out of
10116 an if condition. When not optimizing, we'll not CSE it back.
10117 To avoid regressions such as link errors, return false now. */
10118 if (!val && !optimize)
10119 val = integer_zero_node;
10121 return val;
10124 case BUILT_IN_CLASSIFY_TYPE:
10125 return fold_builtin_classify_type (arg0);
10127 case BUILT_IN_STRLEN:
10128 return fold_builtin_strlen (loc, expr, type, arg0);
10130 CASE_FLT_FN (BUILT_IN_FABS):
10131 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10132 case BUILT_IN_FABSD32:
10133 case BUILT_IN_FABSD64:
10134 case BUILT_IN_FABSD128:
10135 return fold_builtin_fabs (loc, arg0, type);
10137 case BUILT_IN_ABS:
10138 case BUILT_IN_LABS:
10139 case BUILT_IN_LLABS:
10140 case BUILT_IN_IMAXABS:
10141 return fold_builtin_abs (loc, arg0, type);
10143 CASE_FLT_FN (BUILT_IN_CONJ):
10144 if (validate_arg (arg0, COMPLEX_TYPE)
10145 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10146 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10147 break;
10149 CASE_FLT_FN (BUILT_IN_CREAL):
10150 if (validate_arg (arg0, COMPLEX_TYPE)
10151 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10152 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10153 break;
10155 CASE_FLT_FN (BUILT_IN_CIMAG):
10156 if (validate_arg (arg0, COMPLEX_TYPE)
10157 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10158 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10159 break;
10161 CASE_FLT_FN (BUILT_IN_CARG):
10162 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10163 return fold_builtin_carg (loc, arg0, type);
10165 case BUILT_IN_ISASCII:
10166 return fold_builtin_isascii (loc, arg0);
10168 case BUILT_IN_TOASCII:
10169 return fold_builtin_toascii (loc, arg0);
10171 case BUILT_IN_ISDIGIT:
10172 return fold_builtin_isdigit (loc, arg0);
10174 CASE_FLT_FN (BUILT_IN_FINITE):
10175 case BUILT_IN_FINITED32:
10176 case BUILT_IN_FINITED64:
10177 case BUILT_IN_FINITED128:
10178 case BUILT_IN_ISFINITE:
10180 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10181 if (ret)
10182 return ret;
10183 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10186 CASE_FLT_FN (BUILT_IN_ISINF):
10187 case BUILT_IN_ISINFD32:
10188 case BUILT_IN_ISINFD64:
10189 case BUILT_IN_ISINFD128:
10191 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10192 if (ret)
10193 return ret;
10194 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10197 case BUILT_IN_ISNORMAL:
10198 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10200 case BUILT_IN_ISINF_SIGN:
10201 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10203 CASE_FLT_FN (BUILT_IN_ISNAN):
10204 case BUILT_IN_ISNAND32:
10205 case BUILT_IN_ISNAND64:
10206 case BUILT_IN_ISNAND128:
10207 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10209 case BUILT_IN_ISSIGNALING:
10210 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10212 case BUILT_IN_FREE:
10213 if (integer_zerop (arg0))
10214 return build_empty_stmt (loc);
10215 break;
10217 case BUILT_IN_CLZG:
10218 case BUILT_IN_CTZG:
10219 case BUILT_IN_CLRSBG:
10220 case BUILT_IN_FFSG:
10221 case BUILT_IN_PARITYG:
10222 case BUILT_IN_POPCOUNTG:
10223 return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10225 default:
10226 break;
10229 return NULL_TREE;
10233 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10234 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10235 if no simplification was possible. */
10237 static tree
10238 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10243 if (TREE_CODE (arg0) == ERROR_MARK
10244 || TREE_CODE (arg1) == ERROR_MARK)
10245 return NULL_TREE;
10247 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10248 return ret;
10250 switch (fcode)
10252 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10253 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10254 if (validate_arg (arg0, REAL_TYPE)
10255 && validate_arg (arg1, POINTER_TYPE))
10256 return do_mpfr_lgamma_r (arg0, arg1, type);
10257 break;
10259 CASE_FLT_FN (BUILT_IN_FREXP):
10260 return fold_builtin_frexp (loc, arg0, arg1, type);
10262 CASE_FLT_FN (BUILT_IN_MODF):
10263 return fold_builtin_modf (loc, arg0, arg1, type);
10265 case BUILT_IN_STRSPN:
10266 return fold_builtin_strspn (loc, expr, arg0, arg1);
10268 case BUILT_IN_STRCSPN:
10269 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10271 case BUILT_IN_STRPBRK:
10272 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10274 case BUILT_IN_EXPECT:
10275 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10277 case BUILT_IN_ISGREATER:
10278 return fold_builtin_unordered_cmp (loc, fndecl,
10279 arg0, arg1, UNLE_EXPR, LE_EXPR);
10280 case BUILT_IN_ISGREATEREQUAL:
10281 return fold_builtin_unordered_cmp (loc, fndecl,
10282 arg0, arg1, UNLT_EXPR, LT_EXPR);
10283 case BUILT_IN_ISLESS:
10284 return fold_builtin_unordered_cmp (loc, fndecl,
10285 arg0, arg1, UNGE_EXPR, GE_EXPR);
10286 case BUILT_IN_ISLESSEQUAL:
10287 return fold_builtin_unordered_cmp (loc, fndecl,
10288 arg0, arg1, UNGT_EXPR, GT_EXPR);
10289 case BUILT_IN_ISLESSGREATER:
10290 return fold_builtin_unordered_cmp (loc, fndecl,
10291 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10292 case BUILT_IN_ISUNORDERED:
10293 return fold_builtin_unordered_cmp (loc, fndecl,
10294 arg0, arg1, UNORDERED_EXPR,
10295 NOP_EXPR);
10297 case BUILT_IN_ISEQSIG:
10298 return fold_builtin_iseqsig (loc, arg0, arg1);
10300 /* We do the folding for va_start in the expander. */
10301 case BUILT_IN_VA_START:
10302 break;
10304 case BUILT_IN_OBJECT_SIZE:
10305 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10306 return fold_builtin_object_size (arg0, arg1, fcode);
10308 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10309 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10311 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10312 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10314 case BUILT_IN_CLZG:
10315 case BUILT_IN_CTZG:
10316 return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10318 default:
10319 break;
10321 return NULL_TREE;
10324 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10325 and ARG2.
10326 This function returns NULL_TREE if no simplification was possible. */
10328 static tree
10329 fold_builtin_3 (location_t loc, tree fndecl,
10330 tree arg0, tree arg1, tree arg2)
10332 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10333 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10335 if (TREE_CODE (arg0) == ERROR_MARK
10336 || TREE_CODE (arg1) == ERROR_MARK
10337 || TREE_CODE (arg2) == ERROR_MARK)
10338 return NULL_TREE;
10340 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10341 arg0, arg1, arg2))
10342 return ret;
10344 switch (fcode)
10347 CASE_FLT_FN (BUILT_IN_SINCOS):
10348 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10350 CASE_FLT_FN (BUILT_IN_REMQUO):
10351 if (validate_arg (arg0, REAL_TYPE)
10352 && validate_arg (arg1, REAL_TYPE)
10353 && validate_arg (arg2, POINTER_TYPE))
10354 return do_mpfr_remquo (arg0, arg1, arg2);
10355 break;
10357 case BUILT_IN_MEMCMP:
10358 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10360 case BUILT_IN_EXPECT:
10361 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10363 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10364 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10366 case BUILT_IN_ADD_OVERFLOW:
10367 case BUILT_IN_SUB_OVERFLOW:
10368 case BUILT_IN_MUL_OVERFLOW:
10369 case BUILT_IN_ADD_OVERFLOW_P:
10370 case BUILT_IN_SUB_OVERFLOW_P:
10371 case BUILT_IN_MUL_OVERFLOW_P:
10372 case BUILT_IN_SADD_OVERFLOW:
10373 case BUILT_IN_SADDL_OVERFLOW:
10374 case BUILT_IN_SADDLL_OVERFLOW:
10375 case BUILT_IN_SSUB_OVERFLOW:
10376 case BUILT_IN_SSUBL_OVERFLOW:
10377 case BUILT_IN_SSUBLL_OVERFLOW:
10378 case BUILT_IN_SMUL_OVERFLOW:
10379 case BUILT_IN_SMULL_OVERFLOW:
10380 case BUILT_IN_SMULLL_OVERFLOW:
10381 case BUILT_IN_UADD_OVERFLOW:
10382 case BUILT_IN_UADDL_OVERFLOW:
10383 case BUILT_IN_UADDLL_OVERFLOW:
10384 case BUILT_IN_USUB_OVERFLOW:
10385 case BUILT_IN_USUBL_OVERFLOW:
10386 case BUILT_IN_USUBLL_OVERFLOW:
10387 case BUILT_IN_UMUL_OVERFLOW:
10388 case BUILT_IN_UMULL_OVERFLOW:
10389 case BUILT_IN_UMULLL_OVERFLOW:
10390 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10392 default:
10393 break;
10395 return NULL_TREE;
10398 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10399 ARGS is an array of NARGS arguments. IGNORE is true if the result
10400 of the function call is ignored. This function returns NULL_TREE
10401 if no simplification was possible. */
10403 static tree
10404 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10405 int nargs, bool)
10407 tree ret = NULL_TREE;
10409 switch (nargs)
10411 case 0:
10412 ret = fold_builtin_0 (loc, fndecl);
10413 break;
10414 case 1:
10415 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10416 break;
10417 case 2:
10418 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10419 break;
10420 case 3:
10421 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10422 break;
10423 default:
10424 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10425 break;
10427 if (ret)
10429 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10430 SET_EXPR_LOCATION (ret, loc);
10431 return ret;
10433 return NULL_TREE;
10436 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10437 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10438 of arguments in ARGS to be omitted. OLDNARGS is the number of
10439 elements in ARGS. */
10441 static tree
10442 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10443 int skip, tree fndecl, int n, va_list newargs)
10445 int nargs = oldnargs - skip + n;
10446 tree *buffer;
10448 if (n > 0)
10450 int i, j;
10452 buffer = XALLOCAVEC (tree, nargs);
10453 for (i = 0; i < n; i++)
10454 buffer[i] = va_arg (newargs, tree);
10455 for (j = skip; j < oldnargs; j++, i++)
10456 buffer[i] = args[j];
10458 else
10459 buffer = args + skip;
10461 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10464 /* Return true if FNDECL shouldn't be folded right now.
10465 If a built-in function has an inline attribute always_inline
10466 wrapper, defer folding it until after always_inline functions have
10467 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10468 might not be performed. */
10470 bool
10471 avoid_folding_inline_builtin (tree fndecl)
10473 return (DECL_DECLARED_INLINE_P (fndecl)
10474 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10475 && cfun
10476 && !cfun->always_inline_functions_inlined
10477 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10480 /* A wrapper function for builtin folding that prevents warnings for
10481 "statement without effect" and the like, caused by removing the
10482 call node earlier than the warning is generated. */
10484 tree
10485 fold_call_expr (location_t loc, tree exp, bool ignore)
10487 tree ret = NULL_TREE;
10488 tree fndecl = get_callee_fndecl (exp);
10489 if (fndecl && fndecl_built_in_p (fndecl)
10490 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10491 yet. Defer folding until we see all the arguments
10492 (after inlining). */
10493 && !CALL_EXPR_VA_ARG_PACK (exp))
10495 int nargs = call_expr_nargs (exp);
10497 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10498 instead last argument is __builtin_va_arg_pack (). Defer folding
10499 even in that case, until arguments are finalized. */
10500 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10502 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10503 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10504 return NULL_TREE;
10507 if (avoid_folding_inline_builtin (fndecl))
10508 return NULL_TREE;
10510 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10511 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10512 CALL_EXPR_ARGP (exp), ignore);
10513 else
10515 tree *args = CALL_EXPR_ARGP (exp);
10516 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10517 if (ret)
10518 return ret;
10521 return NULL_TREE;
10524 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10525 N arguments are passed in the array ARGARRAY. Return a folded
10526 expression or NULL_TREE if no simplification was possible. */
10528 tree
10529 fold_builtin_call_array (location_t loc, tree,
10530 tree fn,
10531 int n,
10532 tree *argarray)
10534 if (TREE_CODE (fn) != ADDR_EXPR)
10535 return NULL_TREE;
10537 tree fndecl = TREE_OPERAND (fn, 0);
10538 if (TREE_CODE (fndecl) == FUNCTION_DECL
10539 && fndecl_built_in_p (fndecl))
10541 /* If last argument is __builtin_va_arg_pack (), arguments to this
10542 function are not finalized yet. Defer folding until they are. */
10543 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10545 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10546 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10547 return NULL_TREE;
10549 if (avoid_folding_inline_builtin (fndecl))
10550 return NULL_TREE;
10551 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10552 return targetm.fold_builtin (fndecl, n, argarray, false);
10553 else
10554 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10557 return NULL_TREE;
10560 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10561 along with N new arguments specified as the "..." parameters. SKIP
10562 is the number of arguments in EXP to be omitted. This function is used
10563 to do varargs-to-varargs transformations. */
10565 static tree
10566 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10568 va_list ap;
10569 tree t;
10571 va_start (ap, n);
10572 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10573 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10574 va_end (ap);
10576 return t;
10579 /* Validate a single argument ARG against a tree code CODE representing
10580 a type. Return true when argument is valid. */
10582 static bool
10583 validate_arg (const_tree arg, enum tree_code code)
10585 if (!arg)
10586 return false;
10587 else if (code == POINTER_TYPE)
10588 return POINTER_TYPE_P (TREE_TYPE (arg));
10589 else if (code == INTEGER_TYPE)
10590 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10591 return code == TREE_CODE (TREE_TYPE (arg));
10594 /* This function validates the types of a function call argument list
10595 against a specified list of tree_codes. If the last specifier is a 0,
10596 that represents an ellipsis; otherwise the last specifier must be a
10597 VOID_TYPE.
10599 This is the GIMPLE version of validate_arglist. Eventually we want to
10600 completely convert builtins.cc to work from GIMPLEs and the tree based
10601 validate_arglist will then be removed. */
10603 bool
10604 validate_gimple_arglist (const gcall *call, ...)
10606 enum tree_code code;
10607 bool res = 0;
10608 va_list ap;
10609 const_tree arg;
10610 size_t i;
10612 va_start (ap, call);
10613 i = 0;
10617 code = (enum tree_code) va_arg (ap, int);
10618 switch (code)
10620 case 0:
10621 /* This signifies an ellipsis; any further arguments are all ok. */
10622 res = true;
10623 goto end;
10624 case VOID_TYPE:
10625 /* This signifies an endlink, if no arguments remain, return
10626 true, otherwise return false. */
10627 res = (i == gimple_call_num_args (call));
10628 goto end;
10629 default:
10630 /* If no parameters remain or the parameter's code does not
10631 match the specified code, return false. Otherwise continue
10632 checking any remaining arguments. */
10633 arg = gimple_call_arg (call, i++);
10634 if (!validate_arg (arg, code))
10635 goto end;
10636 break;
10639 while (1);
10641 /* We need gotos here since we can only have one VA_CLOSE in a
10642 function. */
10643 end: ;
10644 va_end (ap);
10646 return res;
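/* Usage sketch (illustrative, not taken from this file): validating that
   a gcall *CALL looks like memchr (void *, int, size_t).  The trailing
   VOID_TYPE terminates the list; a trailing 0 would instead stand for an
   ellipsis and accept any further arguments.  */
  if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;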
10649 /* Default target-specific builtin expander that does nothing. */
10651 rtx
10652 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10653 rtx target ATTRIBUTE_UNUSED,
10654 rtx subtarget ATTRIBUTE_UNUSED,
10655 machine_mode mode ATTRIBUTE_UNUSED,
10656 int ignore ATTRIBUTE_UNUSED)
10658 return NULL_RTX;
10661 /* Returns true if EXP represents data that would potentially reside
10662 in a readonly section. */
10664 bool
10665 readonly_data_expr (tree exp)
10667 STRIP_NOPS (exp);
10669 if (TREE_CODE (exp) != ADDR_EXPR)
10670 return false;
10672 exp = get_base_address (TREE_OPERAND (exp, 0));
10673 if (!exp)
10674 return false;
10676 /* Make sure we call decl_readonly_section only for trees it
10677 can handle (since it returns true for everything it doesn't
10678 understand). */
10679 if (TREE_CODE (exp) == STRING_CST
10680 || TREE_CODE (exp) == CONSTRUCTOR
10681 || (VAR_P (exp) && TREE_STATIC (exp)))
10682 return decl_readonly_section (exp, 0);
10683 else
10684 return false;
10687 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10688 to the call, and TYPE is its return type.
10690 Return NULL_TREE if no simplification was possible, otherwise return the
10691 simplified form of the call as a tree.
10693 The simplified form may be a constant or other expression which
10694 computes the same value, but in a more efficient manner (including
10695 calls to other builtin functions).
10697 The call may contain arguments which need to be evaluated, but
10698 which are not useful to determine the result of the call. In
10699 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10700 COMPOUND_EXPR will be an argument which must be evaluated.
10701 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10702 COMPOUND_EXPR in the chain will contain the tree for the simplified
10703 form of the builtin function call. */
10705 static tree
10706 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
10708 if (!validate_arg (s1, POINTER_TYPE)
10709 || !validate_arg (s2, POINTER_TYPE))
10710 return NULL_TREE;
10712 tree fn;
10713 const char *p1, *p2;
10715 p2 = c_getstr (s2);
10716 if (p2 == NULL)
10717 return NULL_TREE;
10719 p1 = c_getstr (s1);
10720 if (p1 != NULL)
10722 const char *r = strpbrk (p1, p2);
10723 tree tem;
10725 if (r == NULL)
10726 return build_int_cst (TREE_TYPE (s1), 0);
10728 /* Return an offset into the constant string argument. */
10729 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10730 return fold_convert_loc (loc, type, tem);
10733 if (p2[0] == '\0')
10734 /* strpbrk(x, "") == NULL.
10735 Evaluate and ignore s1 in case it had side-effects. */
10736 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10738 if (p2[1] != '\0')
10739 return NULL_TREE; /* Really call strpbrk. */
10741 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10742 if (!fn)
10743 return NULL_TREE;
10745 /* New argument list transforming strpbrk(s1, s2) to
10746 strchr(s1, s2[0]). */
10747 return build_call_expr_loc (loc, fn, 2, s1,
10748 build_int_cst (integer_type_node, p2[0]));
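/* Source-level effect of the strpbrk simplification above (illustrative
   sketch; the folds apply only when the second argument is a known
   string constant).  */
#include <string.h>

void
strpbrk_fold_sketch (const char *s)
{
  char *a = strpbrk (s, "");		/* folds to a null pointer; S still evaluated.  */
  char *b = strpbrk (s, "x");		/* folds to strchr (s, 'x').  */
  char *c = strpbrk ("abc", "bc");	/* both strings constant: folds to "abc" + 1.  */
  (void) a; (void) b; (void) c;
}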
10751 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10752 to the call.
10754 Return NULL_TREE if no simplification was possible, otherwise return the
10755 simplified form of the call as a tree.
10757 The simplified form may be a constant or other expression which
10758 computes the same value, but in a more efficient manner (including
10759 calls to other builtin functions).
10761 The call may contain arguments which need to be evaluated, but
10762 which are not useful to determine the result of the call. In
10763 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10764 COMPOUND_EXPR will be an argument which must be evaluated.
10765 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10766 COMPOUND_EXPR in the chain will contain the tree for the simplified
10767 form of the builtin function call. */
10769 static tree
10770 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10772 if (!validate_arg (s1, POINTER_TYPE)
10773 || !validate_arg (s2, POINTER_TYPE))
10774 return NULL_TREE;
10776 if (!check_nul_terminated_array (expr, s1)
10777 || !check_nul_terminated_array (expr, s2))
10778 return NULL_TREE;
10780 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10782 /* If either argument is "", the result is zero. */
10783 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10784 /* Evaluate and ignore both arguments in case either one has
10785 side-effects. */
10786 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10787 s1, s2);
10788 return NULL_TREE;
10791 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10792 to the call.
10794 Return NULL_TREE if no simplification was possible, otherwise return the
10795 simplified form of the call as a tree.
10797 The simplified form may be a constant or other expression which
10798 computes the same value, but in a more efficient manner (including
10799 calls to other builtin functions).
10801 The call may contain arguments which need to be evaluated, but
10802 which are not useful to determine the result of the call. In
10803 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10804 COMPOUND_EXPR will be an argument which must be evaluated.
10805 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10806 COMPOUND_EXPR in the chain will contain the tree for the simplified
10807 form of the builtin function call. */
10809 static tree
10810 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10812 if (!validate_arg (s1, POINTER_TYPE)
10813 || !validate_arg (s2, POINTER_TYPE))
10814 return NULL_TREE;
10816 if (!check_nul_terminated_array (expr, s1)
10817 || !check_nul_terminated_array (expr, s2))
10818 return NULL_TREE;
10820 /* If the first argument is "", the result is zero. */
10821 const char *p1 = c_getstr (s1);
10822 if (p1 && *p1 == '\0')
10824 /* Evaluate and ignore argument s2 in case it has
10825 side-effects. */
10826 return omit_one_operand_loc (loc, size_type_node,
10827 size_zero_node, s2);
10830 /* If the second argument is "", return __builtin_strlen(s1). */
10831 const char *p2 = c_getstr (s2);
10832 if (p2 && *p2 == '\0')
10834 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10836 /* If the replacement _DECL isn't initialized, don't do the
10837 transformation. */
10838 if (!fn)
10839 return NULL_TREE;
10841 return build_call_expr_loc (loc, fn, 1, s1);
10843 return NULL_TREE;
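/* Source-level effect of the strspn/strcspn simplifications above
   (illustrative sketch).  */
#include <string.h>

void
strspn_fold_sketch (const char *s, const char *reject)
{
  size_t a = strspn (s, "");		/* folds to 0; S still evaluated.  */
  size_t b = strcspn ("", reject);	/* folds to 0; REJECT still evaluated.  */
  size_t c = strcspn (s, "");		/* folds to strlen (s).  */
  (void) a; (void) b; (void) c;
}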
10846 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10847 produced, false otherwise. This is done so that we don't output the error
10848 or warning twice or three times. */
10850 bool
10851 fold_builtin_next_arg (tree exp, bool va_start_p)
10853 tree fntype = TREE_TYPE (current_function_decl);
10854 int nargs = call_expr_nargs (exp);
10855 tree arg;
10856 /* There is a good chance the current input_location points inside the
10857 definition of the va_start macro (perhaps on the token for the
10858 builtin) in a system header, so warnings will not be emitted.
10859 Use the location in real source code. */
10860 location_t current_location =
10861 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10862 NULL);
10864 if (!stdarg_p (fntype))
10866 error ("%<va_start%> used in function with fixed arguments");
10867 return true;
10870 if (va_start_p)
10872 if (va_start_p && (nargs != 2))
10874 error ("wrong number of arguments to function %<va_start%>");
10875 return true;
10877 arg = CALL_EXPR_ARG (exp, 1);
10879 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10880 when we checked the arguments and if needed issued a warning. */
10881 else
10883 if (nargs == 0)
10885 /* Evidently an out of date version of <stdarg.h>; can't validate
10886 va_start's second argument, but can still work as intended. */
10887 warning_at (current_location,
10888 OPT_Wvarargs,
10889 "%<__builtin_next_arg%> called without an argument");
10890 return true;
10892 else if (nargs > 1)
10894 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10895 return true;
10897 arg = CALL_EXPR_ARG (exp, 0);
10900 if (TREE_CODE (arg) == SSA_NAME
10901 && SSA_NAME_VAR (arg))
10902 arg = SSA_NAME_VAR (arg);
10904 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10905 or __builtin_next_arg (0) the first time we see it, after checking
10906 the arguments and if needed issuing a warning. */
10907 if (!integer_zerop (arg))
10909 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10911 /* Strip off all nops for the sake of the comparison. This
10912 is not quite the same as STRIP_NOPS. It does more.
10913 We must also strip off INDIRECT_EXPR for C++ reference
10914 parameters. */
10915 while (CONVERT_EXPR_P (arg)
10916 || INDIRECT_REF_P (arg))
10917 arg = TREE_OPERAND (arg, 0);
10918 if (arg != last_parm)
10920 /* FIXME: Sometimes with the tree optimizers we can get something
10921 that is not the last argument even though the user used the last
10922 argument. We just warn and set the arg to be the last
10923 argument so that we will get wrong code because of
10924 it. */
10925 warning_at (current_location,
10926 OPT_Wvarargs,
10927 "second parameter of %<va_start%> not last named argument");
10930 /* Undefined by C99 7.15.1.4p4 (va_start):
10931 "If the parameter parmN is declared with the register storage
10932 class, with a function or array type, or with a type that is
10933 not compatible with the type that results after application of
10934 the default argument promotions, the behavior is undefined."
10936 else if (DECL_REGISTER (arg))
10938 warning_at (current_location,
10939 OPT_Wvarargs,
10940 "undefined behavior when second parameter of "
10941 "%<va_start%> is declared with %<register%> storage");
10944 /* We want to verify the second parameter just once before the tree
10945 optimizers are run and then avoid keeping it in the tree,
10946 as otherwise we could warn even for correct code like:
10947 void foo (int i, ...)
10948 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10949 if (va_start_p)
10950 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10951 else
10952 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10954 return false;
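/* One misuse diagnosed above (illustrative sketch): naming a parameter
   other than the last one in va_start draws the -Wvarargs warning
   emitted by the code above; calling va_start in a function with fixed
   arguments is rejected outright.  */
#include <stdarg.h>

void
va_start_sketch (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);	/* warning: second parameter of 'va_start' not
			   last named argument [-Wvarargs]  */
  va_end (ap);
}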
10958 /* Expand a call EXP to __builtin_object_size. */
10960 static rtx
10961 expand_builtin_object_size (tree exp)
10963 tree ost;
10964 int object_size_type;
10965 tree fndecl = get_callee_fndecl (exp);
10967 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10969 error ("first argument of %qD must be a pointer, second integer constant",
10970 fndecl);
10971 expand_builtin_trap ();
10972 return const0_rtx;
10975 ost = CALL_EXPR_ARG (exp, 1);
10976 STRIP_NOPS (ost);
10978 if (TREE_CODE (ost) != INTEGER_CST
10979 || tree_int_cst_sgn (ost) < 0
10980 || compare_tree_int (ost, 3) > 0)
10982 error ("last argument of %qD is not integer constant between 0 and 3",
10983 fndecl);
10984 expand_builtin_trap ();
10985 return const0_rtx;
10988 object_size_type = tree_to_shwi (ost);
10990 return object_size_type < 2 ? constm1_rtx : const0_rtx;
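/* Behaviour at the source level (illustrative sketch): when the size is
   known the builtin folds earlier; otherwise the expander above yields
   (size_t) -1 for types 0 and 1 and 0 for types 2 and 3.  */
#include <stddef.h>

size_t
object_size_sketch (void *unknown)
{
  char buf[16];
  size_t a = __builtin_object_size (buf, 0);		/* 16 */
  size_t b = __builtin_object_size (unknown, 0);	/* (size_t) -1 */
  size_t c = __builtin_object_size (unknown, 2);	/* 0 */
  return a + b + c;
}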
10993 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10994 FCODE is the BUILT_IN_* to use.
10995 Return NULL_RTX if we failed; the caller should emit a normal call,
10996 otherwise try to get the result in TARGET, if convenient (and in
10997 mode MODE if that's convenient). */
10999 static rtx
11000 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11001 enum built_in_function fcode)
11003 if (!validate_arglist (exp,
11004 POINTER_TYPE,
11005 fcode == BUILT_IN_MEMSET_CHK
11006 ? INTEGER_TYPE : POINTER_TYPE,
11007 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11008 return NULL_RTX;
11010 tree dest = CALL_EXPR_ARG (exp, 0);
11011 tree src = CALL_EXPR_ARG (exp, 1);
11012 tree len = CALL_EXPR_ARG (exp, 2);
11013 tree size = CALL_EXPR_ARG (exp, 3);
11015 /* FIXME: Set access mode to write only for memset et al. */
11016 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11017 /*srcstr=*/NULL_TREE, size, access_read_write);
11019 if (!tree_fits_uhwi_p (size))
11020 return NULL_RTX;
11022 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11024 /* Avoid transforming the checking call to an ordinary one when
11025 an overflow has been detected or when the call couldn't be
11026 validated because the size is not constant. */
11027 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11028 return NULL_RTX;
11030 tree fn = NULL_TREE;
11031 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11032 mem{cpy,pcpy,move,set} is available. */
11033 switch (fcode)
11035 case BUILT_IN_MEMCPY_CHK:
11036 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11037 break;
11038 case BUILT_IN_MEMPCPY_CHK:
11039 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11040 break;
11041 case BUILT_IN_MEMMOVE_CHK:
11042 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11043 break;
11044 case BUILT_IN_MEMSET_CHK:
11045 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11046 break;
11047 default:
11048 break;
11051 if (! fn)
11052 return NULL_RTX;
11054 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11055 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11056 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11057 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11059 else if (fcode == BUILT_IN_MEMSET_CHK)
11060 return NULL_RTX;
11061 else
11063 unsigned int dest_align = get_pointer_alignment (dest);
11065 /* If DEST is not a pointer type, call the normal function. */
11066 if (dest_align == 0)
11067 return NULL_RTX;
11069 /* If SRC and DEST are the same (and not volatile), do nothing. */
11070 if (operand_equal_p (src, dest, 0))
11072 tree expr;
11074 if (fcode != BUILT_IN_MEMPCPY_CHK)
11076 /* Evaluate and ignore LEN in case it has side-effects. */
11077 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11078 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11081 expr = fold_build_pointer_plus (dest, len);
11082 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11085 /* __memmove_chk special case. */
11086 if (fcode == BUILT_IN_MEMMOVE_CHK)
11088 unsigned int src_align = get_pointer_alignment (src);
11090 if (src_align == 0)
11091 return NULL_RTX;
11093 /* If src is categorized for a readonly section we can use
11094 normal __memcpy_chk. */
11095 if (readonly_data_expr (src))
11097 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11098 if (!fn)
11099 return NULL_RTX;
11100 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11101 dest, src, len, size);
11102 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11103 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11104 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11107 return NULL_RTX;
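/* The source-level pattern handled above (illustrative sketch): fortified
   memcpy calls go through the checking builtin, and when the length is
   known to fit the destination the call is expanded as a plain memcpy.  */

void
memcpy_chk_sketch (const char *src)
{
  char buf[32];
  __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));
}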
11111 /* Emit warning if a buffer overflow is detected at compile time. */
11113 static void
11114 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11116 /* The source string. */
11117 tree srcstr = NULL_TREE;
11118 /* The size of the destination object returned by __builtin_object_size. */
11119 tree objsize = NULL_TREE;
11120 /* The string that is being concatenated to (as in __strcat_chk),
11121 or null if there is none. */
11122 tree catstr = NULL_TREE;
11123 /* The maximum length of the source sequence in a bounded operation
11124 (such as __strncat_chk) or null if the operation isn't bounded
11125 (such as __strcat_chk). */
11126 tree maxread = NULL_TREE;
11127 /* The exact size of the access (such as in __strncpy_chk). */
11128 tree size = NULL_TREE;
11129 /* The access by the function that's checked. Except for snprintf,
11130 both writing and reading are checked. */
11131 access_mode mode = access_read_write;
11133 switch (fcode)
11135 case BUILT_IN_STRCPY_CHK:
11136 case BUILT_IN_STPCPY_CHK:
11137 srcstr = CALL_EXPR_ARG (exp, 1);
11138 objsize = CALL_EXPR_ARG (exp, 2);
11139 break;
11141 case BUILT_IN_STRCAT_CHK:
11142 /* For __strcat_chk the warning will be emitted only if overflowing
11143 by at least strlen (dest) + 1 bytes. */
11144 catstr = CALL_EXPR_ARG (exp, 0);
11145 srcstr = CALL_EXPR_ARG (exp, 1);
11146 objsize = CALL_EXPR_ARG (exp, 2);
11147 break;
11149 case BUILT_IN_STRNCAT_CHK:
11150 catstr = CALL_EXPR_ARG (exp, 0);
11151 srcstr = CALL_EXPR_ARG (exp, 1);
11152 maxread = CALL_EXPR_ARG (exp, 2);
11153 objsize = CALL_EXPR_ARG (exp, 3);
11154 break;
11156 case BUILT_IN_STRNCPY_CHK:
11157 case BUILT_IN_STPNCPY_CHK:
11158 srcstr = CALL_EXPR_ARG (exp, 1);
11159 size = CALL_EXPR_ARG (exp, 2);
11160 objsize = CALL_EXPR_ARG (exp, 3);
11161 break;
11163 case BUILT_IN_SNPRINTF_CHK:
11164 case BUILT_IN_VSNPRINTF_CHK:
11165 maxread = CALL_EXPR_ARG (exp, 1);
11166 objsize = CALL_EXPR_ARG (exp, 3);
11167 /* The only checked access is the write to the destination. */
11168 mode = access_write_only;
11169 break;
11170 default:
11171 gcc_unreachable ();
11174 if (catstr && maxread)
11176 /* Check __strncat_chk. There is no way to determine the length
11177 of the string to which the source string is being appended so
11178 just warn when the length of the source string is not known. */
11179 check_strncat_sizes (exp, objsize);
11180 return;
11183 check_access (exp, size, maxread, srcstr, objsize, mode);
11186 /* Emit warning if a buffer overflow is detected at compile time
11187 in __sprintf_chk/__vsprintf_chk calls. */
11189 static void
11190 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11192 tree size, len, fmt;
11193 const char *fmt_str;
11194 int nargs = call_expr_nargs (exp);
11196 /* Verify the required arguments in the original call. */
11198 if (nargs < 4)
11199 return;
11200 size = CALL_EXPR_ARG (exp, 2);
11201 fmt = CALL_EXPR_ARG (exp, 3);
11203 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11204 return;
11206 /* Check whether the format is a literal string constant. */
11207 fmt_str = c_getstr (fmt);
11208 if (fmt_str == NULL)
11209 return;
11211 if (!init_target_chars ())
11212 return;
11214 /* If the format doesn't contain % args or %%, we know its size. */
11215 if (strchr (fmt_str, target_percent) == 0)
11216 len = build_int_cstu (size_type_node, strlen (fmt_str));
11217 /* If the format is "%s" and first ... argument is a string literal,
11218 we know it too. */
11219 else if (fcode == BUILT_IN_SPRINTF_CHK
11220 && strcmp (fmt_str, target_percent_s) == 0)
11222 tree arg;
11224 if (nargs < 5)
11225 return;
11226 arg = CALL_EXPR_ARG (exp, 4);
11227 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11228 return;
11230 len = c_strlen (arg, 1);
11231 if (!len || ! tree_fits_uhwi_p (len))
11232 return;
11234 else
11235 return;
11237 /* Add one for the terminating nul. */
11238 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11240 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11241 access_write_only);
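/* The compile-time overflow the check above detects (illustrative
   sketch): the format contains no '%', so its length is known, and the
   result does not fit the destination.  */

void
sprintf_chk_sketch (void)
{
  char buf[4];
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			   "overflow");	/* diagnosed at compile time  */
}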
11244 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11245 if possible. */
11247 static tree
11248 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11250 tree bytes;
11251 int object_size_type;
11253 if (!validate_arg (ptr, POINTER_TYPE)
11254 || !validate_arg (ost, INTEGER_TYPE))
11255 return NULL_TREE;
11257 STRIP_NOPS (ost);
11259 if (TREE_CODE (ost) != INTEGER_CST
11260 || tree_int_cst_sgn (ost) < 0
11261 || compare_tree_int (ost, 3) > 0)
11262 return NULL_TREE;
11264 object_size_type = tree_to_shwi (ost);
11266 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11267 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11268 and (size_t) 0 for types 2 and 3. */
11269 if (TREE_SIDE_EFFECTS (ptr))
11270 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11272 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11273 object_size_type |= OST_DYNAMIC;
11275 if (TREE_CODE (ptr) == ADDR_EXPR)
11277 compute_builtin_object_size (ptr, object_size_type, &bytes);
11278 if ((object_size_type & OST_DYNAMIC)
11279 || int_fits_type_p (bytes, size_type_node))
11280 return fold_convert (size_type_node, bytes);
11282 else if (TREE_CODE (ptr) == SSA_NAME)
11284 /* If object size is not known yet, delay folding until
11285 later. Maybe subsequent passes will help determine
11286 it. */
11287 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11288 && ((object_size_type & OST_DYNAMIC)
11289 || int_fits_type_p (bytes, size_type_node)))
11290 return fold_convert (size_type_node, bytes);
11293 return NULL_TREE;
11296 /* Builtins with folding operations that operate on "..." arguments
11297 need special handling; we need to store the arguments in a convenient
11298 data structure before attempting any folding. Fortunately there are
11299 only a few builtins that fall into this category. FNDECL is the
11300 function; ARGS are the arguments and NARGS their number. */
11302 static tree
11303 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11305 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11306 tree ret = NULL_TREE;
11308 switch (fcode)
11310 case BUILT_IN_FPCLASSIFY:
11311 ret = fold_builtin_fpclassify (loc, args, nargs);
11312 break;
11314 case BUILT_IN_ADDC:
11315 case BUILT_IN_ADDCL:
11316 case BUILT_IN_ADDCLL:
11317 case BUILT_IN_SUBC:
11318 case BUILT_IN_SUBCL:
11319 case BUILT_IN_SUBCLL:
11320 return fold_builtin_addc_subc (loc, fcode, args);
11322 default:
11323 break;
11325 if (ret)
11327 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11328 SET_EXPR_LOCATION (ret, loc);
11329 suppress_warning (ret);
11330 return ret;
11332 return NULL_TREE;
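/* What the BUILT_IN_FPCLASSIFY case above folds (illustrative sketch):
   <math.h> typically expands fpclassify (x) to this builtin, whose first
   five arguments supply the values to return for each class; with a
   constant argument the call folds to one of them.  */
#include <math.h>

int
fpclassify_sketch (void)
{
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			       FP_SUBNORMAL, FP_ZERO, 1.0);	/* FP_NORMAL */
}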
11335 /* Initialize format string characters in the target charset. */
11337 bool
11338 init_target_chars (void)
11340 static bool init;
11341 if (!init)
11343 target_newline = lang_hooks.to_target_charset ('\n');
11344 target_percent = lang_hooks.to_target_charset ('%');
11345 target_c = lang_hooks.to_target_charset ('c');
11346 target_s = lang_hooks.to_target_charset ('s');
11347 if (target_newline == 0 || target_percent == 0 || target_c == 0
11348 || target_s == 0)
11349 return false;
11351 target_percent_c[0] = target_percent;
11352 target_percent_c[1] = target_c;
11353 target_percent_c[2] = '\0';
11355 target_percent_s[0] = target_percent;
11356 target_percent_s[1] = target_s;
11357 target_percent_s[2] = '\0';
11359 target_percent_s_newline[0] = target_percent;
11360 target_percent_s_newline[1] = target_s;
11361 target_percent_s_newline[2] = target_newline;
11362 target_percent_s_newline[3] = '\0';
11364 init = true;
11366 return true;
11369 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11370 and no overflow/underflow occurred. INEXACT is true if M was not
11371 exactly calculated. TYPE is the tree type for the result. This
11372 function assumes that you cleared the MPFR flags and then
11373 calculated M to see if anything subsequently set a flag prior to
11374 entering this function. Return NULL_TREE if any checks fail. */
11376 static tree
11377 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11379 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11380 overflow/underflow occurred. If -frounding-math, proceed iff the
11381 result of calling FUNC was exact. */
11382 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11383 && (!flag_rounding_math || !inexact))
11385 REAL_VALUE_TYPE rr;
11387 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11388 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11389 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11390 but the mpfr_t is not, then we underflowed in the
11391 conversion. */
11392 if (real_isfinite (&rr)
11393 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11395 REAL_VALUE_TYPE rmode;
11397 real_convert (&rmode, TYPE_MODE (type), &rr);
11398 /* Proceed iff the specified mode can hold the value. */
11399 if (real_identical (&rmode, &rr))
11400 return build_real (type, rmode);
11403 return NULL_TREE;
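/* The clear-flags / compute / check pattern described above, sketched
   with the plain MPFR C API outside of GCC's wrappers (precision 53
   merely stands in for the target format).  */
#include <mpfr.h>

int
mpfr_fold_sketch (double x, double *out)
{
  mpfr_t m;
  int inexact, ok;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, MPFR_RNDN);
  mpfr_clear_flags ();				/* clear before computing  */
  inexact = mpfr_sin (m, m, MPFR_RNDN);		/* nonzero if rounded  */
  ok = mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ();
  /* Under -frounding-math the fold above additionally requires
     INEXACT == 0.  */
  if (ok)
    *out = mpfr_get_d (m, MPFR_RNDN);
  mpfr_clear (m);
  (void) inexact;
  return ok;
}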
11406 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11407 number and no overflow/underflow occurred. INEXACT is true if M
11408 was not exactly calculated. TYPE is the tree type for the result.
11409 This function assumes that you cleared the MPFR flags and then
11410 calculated M to see if anything subsequently set a flag prior to
11411 entering this function. Return NULL_TREE if any checks fail; if
11412 FORCE_CONVERT is true, bypass the checks. */
11414 static tree
11415 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11417 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11418 overflow/underflow occurred. If -frounding-math, proceed iff the
11419 result of calling FUNC was exact. */
11420 if (force_convert
11421 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11422 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11423 && (!flag_rounding_math || !inexact)))
11425 REAL_VALUE_TYPE re, im;
11427 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11428 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11429 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11430 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11431 but the mpfr_t is not, then we underflowed in the
11432 conversion. */
11433 if (force_convert
11434 || (real_isfinite (&re) && real_isfinite (&im)
11435 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11436 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11438 REAL_VALUE_TYPE re_mode, im_mode;
11440 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11441 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11442 /* Proceed iff the specified mode can hold the value. */
11443 if (force_convert
11444 || (real_identical (&re_mode, &re)
11445 && real_identical (&im_mode, &im)))
11446 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11447 build_real (TREE_TYPE (type), im_mode));
11450 return NULL_TREE;
11453 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11454 the pointer *(ARG_QUO) and return the result. The type is taken
11455 from the type of ARG0 and is used for setting the precision of the
11456 calculation and results. */
11458 static tree
11459 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11461 tree const type = TREE_TYPE (arg0);
11462 tree result = NULL_TREE;
11464 STRIP_NOPS (arg0);
11465 STRIP_NOPS (arg1);
11467 /* To proceed, MPFR must exactly represent the target floating point
11468 format, which only happens when the target base equals two. */
11469 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11470 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11471 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11473 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11474 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11476 if (real_isfinite (ra0) && real_isfinite (ra1))
11478 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11479 const int prec = fmt->p;
11480 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11481 tree result_rem;
11482 long integer_quo;
11483 mpfr_t m0, m1;
11485 mpfr_inits2 (prec, m0, m1, NULL);
11486 mpfr_from_real (m0, ra0, MPFR_RNDN);
11487 mpfr_from_real (m1, ra1, MPFR_RNDN);
11488 mpfr_clear_flags ();
11489 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11490 /* Remquo is independent of the rounding mode, so pass
11491 inexact=0 to do_mpfr_ckconv(). */
11492 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11493 mpfr_clears (m0, m1, NULL);
11494 if (result_rem)
11496 /* MPFR calculates quo in the host's long so it may
11497 return more bits in quo than the target int can hold
11498 if sizeof(host long) > sizeof(target int). This can
11499 happen even for native compilers in LP64 mode. In
11500 these cases, modulo the quo value with the largest
11501 number that the target int can hold while leaving one
11502 bit for the sign. */
11503 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11504 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11506 /* Dereference the quo pointer argument. */
11507 arg_quo = build_fold_indirect_ref (arg_quo);
11508 /* Proceed iff a valid pointer type was passed in. */
11509 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11511 /* Set the value. */
11512 tree result_quo
11513 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11514 build_int_cst (TREE_TYPE (arg_quo),
11515 integer_quo));
11516 TREE_SIDE_EFFECTS (result_quo) = 1;
11517 /* Combine the quo assignment with the rem. */
11518 result = fold_build2 (COMPOUND_EXPR, type,
11519 result_quo, result_rem);
11520 suppress_warning (result, OPT_Wunused_value);
11521 result = non_lvalue (result);
11526 return result;
11529 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11530 resulting value as a tree with type TYPE. The mpfr precision is
11531 set to the precision of TYPE. We assume that this mpfr function
11532 returns zero if the result could be calculated exactly within the
11533 requested precision. In addition, the integer pointer represented
11534 by ARG_SG will be dereferenced and set to the appropriate signgam
11535 (-1,1) value. */
11537 static tree
11538 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11540 tree result = NULL_TREE;
11542 STRIP_NOPS (arg);
11544 /* To proceed, MPFR must exactly represent the target floating point
11545 format, which only happens when the target base equals two. Also
11546 verify ARG is a constant and that ARG_SG is an int pointer. */
11547 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11548 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11549 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11550 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11552 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11554 /* In addition to NaN and Inf, the argument cannot be zero or a
11555 negative integer. */
11556 if (real_isfinite (ra)
11557 && ra->cl != rvc_zero
11558 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11560 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11561 const int prec = fmt->p;
11562 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11563 int inexact, sg;
11564 tree result_lg;
11566 auto_mpfr m (prec);
11567 mpfr_from_real (m, ra, MPFR_RNDN);
11568 mpfr_clear_flags ();
11569 inexact = mpfr_lgamma (m, &sg, m, rnd);
11570 result_lg = do_mpfr_ckconv (m, type, inexact);
11571 if (result_lg)
11573 tree result_sg;
11575 /* Dereference the arg_sg pointer argument. */
11576 arg_sg = build_fold_indirect_ref (arg_sg);
11577 /* Assign the signgam value into *arg_sg. */
11578 result_sg = fold_build2 (MODIFY_EXPR,
11579 TREE_TYPE (arg_sg), arg_sg,
11580 build_int_cst (TREE_TYPE (arg_sg), sg));
11581 TREE_SIDE_EFFECTS (result_sg) = 1;
11582 /* Combine the signgam assignment with the lgamma result. */
11583 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11584 result_sg, result_lg));
11589 return result;
11592 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11593 mpc function FUNC on it and return the resulting value as a tree
11594 with type TYPE. The mpfr precision is set to the precision of
11595 TYPE. We assume that function FUNC returns zero if the result
11596 could be calculated exactly within the requested precision. If
11597 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11598 in the arguments and/or results. */
11600 tree
11601 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11602 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11604 tree result = NULL_TREE;
11606 STRIP_NOPS (arg0);
11607 STRIP_NOPS (arg1);
11609 /* To proceed, MPFR must exactly represent the target floating point
11610 format, which only happens when the target base equals two. */
11611 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11612 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
11613 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11614 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
11615 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11617 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11618 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11619 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11620 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11622 if (do_nonfinite
11623 || (real_isfinite (re0) && real_isfinite (im0)
11624 && real_isfinite (re1) && real_isfinite (im1)))
11626 const struct real_format *const fmt =
11627 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11628 const int prec = fmt->p;
11629 const mpfr_rnd_t rnd = fmt->round_towards_zero
11630 ? MPFR_RNDZ : MPFR_RNDN;
11631 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11632 int inexact;
11633 mpc_t m0, m1;
11635 mpc_init2 (m0, prec);
11636 mpc_init2 (m1, prec);
11637 mpfr_from_real (mpc_realref (m0), re0, rnd);
11638 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11639 mpfr_from_real (mpc_realref (m1), re1, rnd);
11640 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11641 mpfr_clear_flags ();
11642 inexact = func (m0, m0, m1, crnd);
11643 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11644 mpc_clear (m0);
11645 mpc_clear (m1);
11649 return result;
11652 /* A wrapper function for builtin folding that prevents warnings for
11653 "statement without effect" and the like, caused by removing the
11654 call node earlier than the warning is generated. */
11656 tree
11657 fold_call_stmt (gcall *stmt, bool ignore)
11659 tree ret = NULL_TREE;
11660 tree fndecl = gimple_call_fndecl (stmt);
11661 location_t loc = gimple_location (stmt);
11662 if (fndecl && fndecl_built_in_p (fndecl)
11663 && !gimple_call_va_arg_pack_p (stmt))
11665 int nargs = gimple_call_num_args (stmt);
11666 tree *args = (nargs > 0
11667 ? gimple_call_arg_ptr (stmt, 0)
11668 : &error_mark_node);
11670 if (avoid_folding_inline_builtin (fndecl))
11671 return NULL_TREE;
11672 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11674 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11676 else
11678 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11679 if (ret)
11681 /* Propagate location information from original call to
11682 expansion of builtin. Otherwise things like
11683 maybe_emit_chk_warning, that operate on the expansion
11684 of a builtin, will use the wrong location information. */
11685 if (gimple_has_location (stmt))
11687 tree realret = ret;
11688 if (TREE_CODE (ret) == NOP_EXPR)
11689 realret = TREE_OPERAND (ret, 0);
11690 if (CAN_HAVE_LOCATION_P (realret)
11691 && !EXPR_HAS_LOCATION (realret))
11692 SET_EXPR_LOCATION (realret, loc);
11693 return realret;
11695 return ret;
11699 return NULL_TREE;
11702 /* Look up the function in builtin_decl that corresponds to DECL
11703 and set ASMSPEC as its user assembler name. DECL must be a
11704 function decl that declares a builtin. */
11706 void
11707 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11709 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11710 && asmspec != 0);
11712 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11713 set_user_assembler_name (builtin, asmspec);
11715 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11716 && INT_TYPE_SIZE < BITS_PER_WORD)
11718 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11719 set_user_assembler_libfunc ("ffs", asmspec);
11720 set_optab_libfunc (ffs_optab, mode, "ffs");
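/* The kind of declaration that reaches the hook above (illustrative
   sketch; the assembler name "my_ffs" is made up): giving a builtin a
   user assembler name, which for ffs, when int is narrower than a word,
   also updates the ffs libfunc as handled above.  */

extern int ffs (int) __asm__ ("my_ffs");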
11724 /* Return true if DECL is a builtin that expands to a constant or similarly
11725 simple code. */
11726 bool
11727 is_simple_builtin (tree decl)
11729 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11730 switch (DECL_FUNCTION_CODE (decl))
11732 /* Builtins that expand to constants. */
11733 case BUILT_IN_CONSTANT_P:
11734 case BUILT_IN_EXPECT:
11735 case BUILT_IN_OBJECT_SIZE:
11736 case BUILT_IN_UNREACHABLE:
11737 /* Simple register moves or loads from stack. */
11738 case BUILT_IN_ASSUME_ALIGNED:
11739 case BUILT_IN_RETURN_ADDRESS:
11740 case BUILT_IN_EXTRACT_RETURN_ADDR:
11741 case BUILT_IN_FROB_RETURN_ADDR:
11742 case BUILT_IN_RETURN:
11743 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11744 case BUILT_IN_FRAME_ADDRESS:
11745 case BUILT_IN_VA_END:
11746 case BUILT_IN_STACK_SAVE:
11747 case BUILT_IN_STACK_RESTORE:
11748 case BUILT_IN_DWARF_CFA:
11749 /* Exception state returns or moves registers around. */
11750 case BUILT_IN_EH_FILTER:
11751 case BUILT_IN_EH_POINTER:
11752 case BUILT_IN_EH_COPY_VALUES:
11753 return true;
11755 default:
11756 return false;
11759 return false;
11762 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11763 most probably expanded inline into reasonably simple code. This is a
11764 superset of is_simple_builtin. */
11765 bool
11766 is_inexpensive_builtin (tree decl)
11768 if (!decl)
11769 return false;
11770 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11771 return true;
11772 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11773 switch (DECL_FUNCTION_CODE (decl))
11775 case BUILT_IN_ABS:
11776 CASE_BUILT_IN_ALLOCA:
11777 case BUILT_IN_BSWAP16:
11778 case BUILT_IN_BSWAP32:
11779 case BUILT_IN_BSWAP64:
11780 case BUILT_IN_BSWAP128:
11781 case BUILT_IN_CLZ:
11782 case BUILT_IN_CLZIMAX:
11783 case BUILT_IN_CLZL:
11784 case BUILT_IN_CLZLL:
11785 case BUILT_IN_CTZ:
11786 case BUILT_IN_CTZIMAX:
11787 case BUILT_IN_CTZL:
11788 case BUILT_IN_CTZLL:
11789 case BUILT_IN_FFS:
11790 case BUILT_IN_FFSIMAX:
11791 case BUILT_IN_FFSL:
11792 case BUILT_IN_FFSLL:
11793 case BUILT_IN_IMAXABS:
11794 case BUILT_IN_FINITE:
11795 case BUILT_IN_FINITEF:
11796 case BUILT_IN_FINITEL:
11797 case BUILT_IN_FINITED32:
11798 case BUILT_IN_FINITED64:
11799 case BUILT_IN_FINITED128:
11800 case BUILT_IN_FPCLASSIFY:
11801 case BUILT_IN_ISFINITE:
11802 case BUILT_IN_ISINF_SIGN:
11803 case BUILT_IN_ISINF:
11804 case BUILT_IN_ISINFF:
11805 case BUILT_IN_ISINFL:
11806 case BUILT_IN_ISINFD32:
11807 case BUILT_IN_ISINFD64:
11808 case BUILT_IN_ISINFD128:
11809 case BUILT_IN_ISNAN:
11810 case BUILT_IN_ISNANF:
11811 case BUILT_IN_ISNANL:
11812 case BUILT_IN_ISNAND32:
11813 case BUILT_IN_ISNAND64:
11814 case BUILT_IN_ISNAND128:
11815 case BUILT_IN_ISNORMAL:
11816 case BUILT_IN_ISGREATER:
11817 case BUILT_IN_ISGREATEREQUAL:
11818 case BUILT_IN_ISLESS:
11819 case BUILT_IN_ISLESSEQUAL:
11820 case BUILT_IN_ISLESSGREATER:
11821 case BUILT_IN_ISUNORDERED:
11822 case BUILT_IN_ISEQSIG:
11823 case BUILT_IN_VA_ARG_PACK:
11824 case BUILT_IN_VA_ARG_PACK_LEN:
11825 case BUILT_IN_VA_COPY:
11826 case BUILT_IN_TRAP:
11827 case BUILT_IN_UNREACHABLE_TRAP:
11828 case BUILT_IN_SAVEREGS:
11829 case BUILT_IN_POPCOUNTL:
11830 case BUILT_IN_POPCOUNTLL:
11831 case BUILT_IN_POPCOUNTIMAX:
11832 case BUILT_IN_POPCOUNT:
11833 case BUILT_IN_PARITYL:
11834 case BUILT_IN_PARITYLL:
11835 case BUILT_IN_PARITYIMAX:
11836 case BUILT_IN_PARITY:
11837 case BUILT_IN_LABS:
11838 case BUILT_IN_LLABS:
11839 case BUILT_IN_PREFETCH:
11840 case BUILT_IN_ACC_ON_DEVICE:
11841 return true;
11843 default:
11844 return is_simple_builtin (decl);
11847 return false;
11850 /* Return true if T is a constant and the value cast to a target char
11851 can be represented by a host char.
11852 Store the converted char constant in *P if so. */
11854 bool
11855 target_char_cst_p (tree t, char *p)
11857 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11858 return false;
11860 *p = (char)tree_to_uhwi (t);
11861 return true;
11864 /* Return true if the builtin DECL is implemented in a standard library.
11865 Otherwise return false, which doesn't guarantee that it is not (thus the list
11866 of handled builtins below may be incomplete). */
11868 bool
11869 builtin_with_linkage_p (tree decl)
11871 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11872 switch (DECL_FUNCTION_CODE (decl))
11874 CASE_FLT_FN (BUILT_IN_ACOS):
11875 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
11876 CASE_FLT_FN (BUILT_IN_ACOSH):
11877 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
11878 CASE_FLT_FN (BUILT_IN_ASIN):
11879 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
11880 CASE_FLT_FN (BUILT_IN_ASINH):
11881 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
11882 CASE_FLT_FN (BUILT_IN_ATAN):
11883 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
11884 CASE_FLT_FN (BUILT_IN_ATANH):
11885 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
11886 CASE_FLT_FN (BUILT_IN_ATAN2):
11887 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
11888 CASE_FLT_FN (BUILT_IN_CBRT):
11889 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
11890 CASE_FLT_FN (BUILT_IN_CEIL):
11891 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11892 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11893 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11894 CASE_FLT_FN (BUILT_IN_COS):
11895 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
11896 CASE_FLT_FN (BUILT_IN_COSH):
11897 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
11898 CASE_FLT_FN (BUILT_IN_ERF):
11899 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
11900 CASE_FLT_FN (BUILT_IN_ERFC):
11901 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
11902 CASE_FLT_FN (BUILT_IN_EXP):
11903 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
11904 CASE_FLT_FN (BUILT_IN_EXP2):
11905 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
11906 CASE_FLT_FN (BUILT_IN_EXPM1):
11907 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
11908 CASE_FLT_FN (BUILT_IN_FABS):
11909 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11910 CASE_FLT_FN (BUILT_IN_FDIM):
11911 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
11912 CASE_FLT_FN (BUILT_IN_FLOOR):
11913 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11914 CASE_FLT_FN (BUILT_IN_FMA):
11915 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11916 CASE_FLT_FN (BUILT_IN_FMAX):
11917 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11918 CASE_FLT_FN (BUILT_IN_FMIN):
11919 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11920 CASE_FLT_FN (BUILT_IN_FMOD):
11921 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
11922 CASE_FLT_FN (BUILT_IN_FREXP):
11923 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
11924 CASE_FLT_FN (BUILT_IN_HYPOT):
11925 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
11926 CASE_FLT_FN (BUILT_IN_ILOGB):
11927 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
11928 CASE_FLT_FN (BUILT_IN_LDEXP):
11929 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
11930 CASE_FLT_FN (BUILT_IN_LGAMMA):
11931 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
11932 CASE_FLT_FN (BUILT_IN_LLRINT):
11933 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
11934 CASE_FLT_FN (BUILT_IN_LLROUND):
11935 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
11936 CASE_FLT_FN (BUILT_IN_LOG):
11937 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
11938 CASE_FLT_FN (BUILT_IN_LOG10):
11939 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
11940 CASE_FLT_FN (BUILT_IN_LOG1P):
11941 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
11942 CASE_FLT_FN (BUILT_IN_LOG2):
11943 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
11944 CASE_FLT_FN (BUILT_IN_LOGB):
11945 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
11946 CASE_FLT_FN (BUILT_IN_LRINT):
11947 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
11948 CASE_FLT_FN (BUILT_IN_LROUND):
11949 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
11950 CASE_FLT_FN (BUILT_IN_MODF):
11951 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
11952 CASE_FLT_FN (BUILT_IN_NAN):
11953 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
11954 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11955 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11956 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11957 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
11958 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11959 CASE_FLT_FN (BUILT_IN_POW):
11960 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
11961 CASE_FLT_FN (BUILT_IN_REMAINDER):
11962 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
11963 CASE_FLT_FN (BUILT_IN_REMQUO):
11964 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
11965 CASE_FLT_FN (BUILT_IN_RINT):
11966 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11967 CASE_FLT_FN (BUILT_IN_ROUND):
11968 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11969 CASE_FLT_FN (BUILT_IN_SCALBLN):
11970 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
11971 CASE_FLT_FN (BUILT_IN_SCALBN):
11972 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
11973 CASE_FLT_FN (BUILT_IN_SIN):
11974 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
11975 CASE_FLT_FN (BUILT_IN_SINH):
11976 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
11977 CASE_FLT_FN (BUILT_IN_SINCOS):
11978 CASE_FLT_FN (BUILT_IN_SQRT):
11979 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11980 CASE_FLT_FN (BUILT_IN_TAN):
11981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
11982 CASE_FLT_FN (BUILT_IN_TANH):
11983 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
11984 CASE_FLT_FN (BUILT_IN_TGAMMA):
11985 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
11986 CASE_FLT_FN (BUILT_IN_TRUNC):
11987 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11988 return true;
11990 case BUILT_IN_STPCPY:
11991 case BUILT_IN_STPNCPY:
11992 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11993 by libiberty's stpcpy.c for MinGW targets so we need to return true
11994 in order to be able to build libiberty in LTO mode for them. */
11995 return true;
11997 default:
11998 break;
12000 return false;
12003 /* Return true if OFFRNG is bounded to a subrange of offset values
12004 valid for the largest possible object. */
12006 bool
12007 access_ref::offset_bounded () const
12009 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12010 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12011 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
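/* builtin_fnspec below returns strings in the "fn spec" format described
   in attr-fnspec.h.  A rough sketch of the two leading characters only
   (an approximation; attr-fnspec.h is authoritative, and the per-argument
   pairs that follow are not decoded here):  */

const char *
fnspec_leading_chars_sketch (const char *fnspec)
{
  /* Character 0 describes the return value.  */
  if (fnspec[0] >= '1' && fnspec[0] <= '4')
    return "returns the given argument (e.g. \"1cO3\": memset returns arg 1)";
  if (fnspec[0] == 'm')
    return "returns newly allocated, non-aliased memory (e.g. malloc)";
  /* '.' means nothing is known about the return value.  Character 1 is
     'c'/'C' for const-like or 'p'/'P' for pure-like behaviour apart from
     the per-argument effects described by the remaining characters.  */
  return "nothing known about the return value";
}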
12014 /* Return an fnspec string describing the known side effects of builtin CALLEE.
12015 See tree-ssa-structalias.cc:find_func_aliases
12016 for the list of builtins we might need to handle here. */
12018 attr_fnspec
12019 builtin_fnspec (tree callee)
12021 built_in_function code = DECL_FUNCTION_CODE (callee);
12023 switch (code)
12025 /* All the following functions read memory pointed to by
12026 their second argument and write memory pointed to by their first
12027 argument.
12028 strcat/strncat additionally read memory pointed to by the first
12029 argument. */
12030 case BUILT_IN_STRCAT:
12031 case BUILT_IN_STRCAT_CHK:
12032 return "1cW 1 ";
12033 case BUILT_IN_STRNCAT:
12034 case BUILT_IN_STRNCAT_CHK:
12035 return "1cW 13";
12036 case BUILT_IN_STRCPY:
12037 case BUILT_IN_STRCPY_CHK:
12038 return "1cO 1 ";
12039 case BUILT_IN_STPCPY:
12040 case BUILT_IN_STPCPY_CHK:
12041 return ".cO 1 ";
12042 case BUILT_IN_STRNCPY:
12043 case BUILT_IN_MEMCPY:
12044 case BUILT_IN_MEMMOVE:
12045 case BUILT_IN_TM_MEMCPY:
12046 case BUILT_IN_TM_MEMMOVE:
12047 case BUILT_IN_STRNCPY_CHK:
12048 case BUILT_IN_MEMCPY_CHK:
12049 case BUILT_IN_MEMMOVE_CHK:
12050 return "1cO313";
12051 case BUILT_IN_MEMPCPY:
12052 case BUILT_IN_MEMPCPY_CHK:
12053 return ".cO313";
12054 case BUILT_IN_STPNCPY:
12055 case BUILT_IN_STPNCPY_CHK:
12056 return ".cO313";
12057 case BUILT_IN_BCOPY:
12058 return ".c23O3";
12059 case BUILT_IN_BZERO:
12060 return ".cO2";
12061 case BUILT_IN_MEMCMP:
12062 case BUILT_IN_MEMCMP_EQ:
12063 case BUILT_IN_BCMP:
12064 case BUILT_IN_STRNCMP:
12065 case BUILT_IN_STRNCMP_EQ:
12066 case BUILT_IN_STRNCASECMP:
12067 return ".cR3R3";
12069 /* The following functions read memory pointed to by their
12070 first argument. */
12071 CASE_BUILT_IN_TM_LOAD (1):
12072 CASE_BUILT_IN_TM_LOAD (2):
12073 CASE_BUILT_IN_TM_LOAD (4):
12074 CASE_BUILT_IN_TM_LOAD (8):
12075 CASE_BUILT_IN_TM_LOAD (FLOAT):
12076 CASE_BUILT_IN_TM_LOAD (DOUBLE):
12077 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
12078 CASE_BUILT_IN_TM_LOAD (M64):
12079 CASE_BUILT_IN_TM_LOAD (M128):
12080 CASE_BUILT_IN_TM_LOAD (M256):
12081 case BUILT_IN_TM_LOG:
12082 case BUILT_IN_TM_LOG_1:
12083 case BUILT_IN_TM_LOG_2:
12084 case BUILT_IN_TM_LOG_4:
12085 case BUILT_IN_TM_LOG_8:
12086 case BUILT_IN_TM_LOG_FLOAT:
12087 case BUILT_IN_TM_LOG_DOUBLE:
12088 case BUILT_IN_TM_LOG_LDOUBLE:
12089 case BUILT_IN_TM_LOG_M64:
12090 case BUILT_IN_TM_LOG_M128:
12091 case BUILT_IN_TM_LOG_M256:
12092 return ".cR ";
12094 case BUILT_IN_INDEX:
12095 case BUILT_IN_RINDEX:
12096 case BUILT_IN_STRCHR:
12097 case BUILT_IN_STRLEN:
12098 case BUILT_IN_STRRCHR:
12099 return ".cR ";
12100 case BUILT_IN_STRNLEN:
12101 return ".cR2";
12103 /* These read memory pointed to by the first argument.
12104 Allocating memory does not have any side-effects apart from
12105 being the definition point for the pointer.
12106 Unix98 specifies that errno is set on allocation failure. */
12107 case BUILT_IN_STRDUP:
12108 return "mCR ";
12109 case BUILT_IN_STRNDUP:
12110 return "mCR2";
12111 /* Allocating memory does not have any side-effects apart from
12112 being the definition point for the pointer. */
12113 case BUILT_IN_MALLOC:
12114 case BUILT_IN_ALIGNED_ALLOC:
12115 case BUILT_IN_CALLOC:
12116 case BUILT_IN_GOMP_ALLOC:
12117 return "mC";
12118 CASE_BUILT_IN_ALLOCA:
12119 return "mc";
12120 /* These read memory pointed to by the first argument with size
12121 in the third argument. */
12122 case BUILT_IN_MEMCHR:
12123 return ".cR3";
12124 /* These read memory pointed to by the first and second arguments. */
12125 case BUILT_IN_STRSTR:
12126 case BUILT_IN_STRPBRK:
12127 case BUILT_IN_STRCASECMP:
12128 case BUILT_IN_STRCSPN:
12129 case BUILT_IN_STRSPN:
12130 case BUILT_IN_STRCMP:
12131 case BUILT_IN_STRCMP_EQ:
12132 return ".cR R ";
12133 /* Freeing memory kills the pointed-to memory. More importantly
12134 the call has to serve as a barrier for moving loads and stores
12135 across it. */
12136 case BUILT_IN_STACK_RESTORE:
12137 case BUILT_IN_FREE:
12138 case BUILT_IN_GOMP_FREE:
12139 return ".co ";
12140 case BUILT_IN_VA_END:
12141 return ".cO ";
12142 /* Realloc serves both as allocation point and deallocation point. */
12143 case BUILT_IN_REALLOC:
12144 return ".Cw ";
12145 case BUILT_IN_GAMMA_R:
12146 case BUILT_IN_GAMMAF_R:
12147 case BUILT_IN_GAMMAL_R:
12148 case BUILT_IN_LGAMMA_R:
12149 case BUILT_IN_LGAMMAF_R:
12150 case BUILT_IN_LGAMMAL_R:
12151 return ".C. Ot";
12152 case BUILT_IN_FREXP:
12153 case BUILT_IN_FREXPF:
12154 case BUILT_IN_FREXPL:
12155 case BUILT_IN_MODF:
12156 case BUILT_IN_MODFF:
12157 case BUILT_IN_MODFL:
12158 return ".c. Ot";
12159 case BUILT_IN_REMQUO:
12160 case BUILT_IN_REMQUOF:
12161 case BUILT_IN_REMQUOL:
12162 return ".c. . Ot";
12163 case BUILT_IN_SINCOS:
12164 case BUILT_IN_SINCOSF:
12165 case BUILT_IN_SINCOSL:
12166 return ".c. OtOt";
12167 case BUILT_IN_MEMSET:
12168 case BUILT_IN_MEMSET_CHK:
12169 case BUILT_IN_TM_MEMSET:
12170 return "1cO3";
12171 CASE_BUILT_IN_TM_STORE (1):
12172 CASE_BUILT_IN_TM_STORE (2):
12173 CASE_BUILT_IN_TM_STORE (4):
12174 CASE_BUILT_IN_TM_STORE (8):
12175 CASE_BUILT_IN_TM_STORE (FLOAT):
12176 CASE_BUILT_IN_TM_STORE (DOUBLE):
12177 CASE_BUILT_IN_TM_STORE (LDOUBLE):
12178 CASE_BUILT_IN_TM_STORE (M64):
12179 CASE_BUILT_IN_TM_STORE (M128):
12180 CASE_BUILT_IN_TM_STORE (M256):
12181 return ".cO ";
12182 case BUILT_IN_STACK_SAVE:
12183 case BUILT_IN_RETURN:
12184 case BUILT_IN_EH_POINTER:
12185 case BUILT_IN_EH_FILTER:
12186 case BUILT_IN_UNWIND_RESUME:
12187 case BUILT_IN_CXA_END_CLEANUP:
12188 case BUILT_IN_EH_COPY_VALUES:
12189 case BUILT_IN_FRAME_ADDRESS:
12190 case BUILT_IN_APPLY_ARGS:
12191 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
12192 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
12193 case BUILT_IN_PREFETCH:
12194 case BUILT_IN_DWARF_CFA:
12195 case BUILT_IN_RETURN_ADDRESS:
12196 return ".c";
12197 case BUILT_IN_ASSUME_ALIGNED:
12198 case BUILT_IN_EXPECT:
12199 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12200 return "1cX ";
12201 /* But posix_memalign stores a pointer into the memory pointed to
12202 by its first argument. */
12203 case BUILT_IN_POSIX_MEMALIGN:
12204 return ".cOt";
12206 default:
12207 return "";