1 /* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
 96 {
 97 #include "builtins.def"
 98 };
 100 /* Set up an array of builtin_info_type, making sure each element's decl
 101    is initialized to NULL_TREE.  */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_cexpi (tree, rtx);
123 static rtx expand_builtin_int_roundingfn (tree, rtx);
124 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
125 static rtx expand_builtin_next_arg (void);
126 static rtx expand_builtin_va_start (tree);
127 static rtx expand_builtin_va_end (tree);
128 static rtx expand_builtin_va_copy (tree);
129 static rtx inline_expand_builtin_bytecmp (tree, rtx);
130 static rtx expand_builtin_strcmp (tree, rtx);
131 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
132 static rtx expand_builtin_memcpy (tree, rtx);
133 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
134 rtx target, tree exp,
135 memop_ret retmode,
136 bool might_overlap);
137 static rtx expand_builtin_memmove (tree, rtx);
138 static rtx expand_builtin_mempcpy (tree, rtx);
139 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
140 static rtx expand_builtin_strcpy (tree, rtx);
141 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
142 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
143 static rtx expand_builtin_strncpy (tree, rtx);
144 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
145 static rtx expand_builtin_bzero (tree);
146 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
148 static rtx expand_builtin_alloca (tree);
149 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static tree stabilize_va_list_loc (location_t, tree, int);
152 static rtx expand_builtin_expect (tree, rtx);
153 static rtx expand_builtin_expect_with_probability (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_classify_type (tree);
156 static tree fold_builtin_strlen (location_t, tree, tree, tree);
157 static tree fold_builtin_inf (location_t, tree, int);
158 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
159 static bool validate_arg (const_tree, enum tree_code code);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_isascii (location_t, tree);
164 static tree fold_builtin_toascii (location_t, tree);
165 static tree fold_builtin_isdigit (location_t, tree);
166 static tree fold_builtin_fabs (location_t, tree, tree);
167 static tree fold_builtin_abs (location_t, tree, tree);
168 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
169 enum tree_code);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static tree fold_builtin_object_size (tree, tree);
183 unsigned HOST_WIDE_INT target_newline;
184 unsigned HOST_WIDE_INT target_percent;
185 static unsigned HOST_WIDE_INT target_c;
186 static unsigned HOST_WIDE_INT target_s;
187 char target_percent_c[3];
188 char target_percent_s[3];
189 char target_percent_s_newline[4];
190 static tree do_mpfr_remquo (tree, tree, tree);
191 static tree do_mpfr_lgamma_r (tree, tree, tree);
192 static void expand_builtin_sync_synchronize (void);
 194 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */
196 static bool
197 is_builtin_name (const char *name)
199 return (startswith (name, "__builtin_")
200 || startswith (name, "__sync_")
201 || startswith (name, "__atomic_"));
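/* For example, "__builtin_memcpy", "__sync_fetch_and_add_4" and
   "__atomic_load_n" all satisfy this predicate, while plain "memcpy"
   does not.  */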
204 /* Return true if NODE should be considered for inline expansion regardless
205 of the optimization level. This means whenever a function is invoked with
206 its "internal" name, which normally contains the prefix "__builtin". */
208 bool
209 called_as_built_in (tree node)
211 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
212 we want the name used to call the function, not the name it
213 will have. */
214 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
215 return is_builtin_name (name);
218 /* Compute values M and N such that M divides (address of EXP - N) and such
 219    that N < M.  If these numbers can be determined, store M in *ALIGNP and
 220    N in *BITPOSP and return true.  Otherwise return false and store
 221    BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.
223 Note that the address (and thus the alignment) computed here is based
224 on the address to which a symbol resolves, whereas DECL_ALIGN is based
225 on the address at which an object is actually located. These two
226 addresses are not always the same. For example, on ARM targets,
227 the address &foo of a Thumb function foo() has the lowest bit set,
228 whereas foo() itself starts on an even address.
230 If ADDR_P is true we are taking the address of the memory reference EXP
231 and thus cannot rely on the access taking place. */
233 static bool
234 get_object_alignment_2 (tree exp, unsigned int *alignp,
235 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
237 poly_int64 bitsize, bitpos;
238 tree offset;
239 machine_mode mode;
240 int unsignedp, reversep, volatilep;
241 unsigned int align = BITS_PER_UNIT;
242 bool known_alignment = false;
244 /* Get the innermost object and the constant (bitpos) and possibly
245 variable (offset) offset of the access. */
246 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
247 &unsignedp, &reversep, &volatilep);
249 /* Extract alignment information from the innermost object and
250 possibly adjust bitpos and offset. */
251 if (TREE_CODE (exp) == FUNCTION_DECL)
253 /* Function addresses can encode extra information besides their
254 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
255 allows the low bit to be used as a virtual bit, we know
256 that the address itself must be at least 2-byte aligned. */
257 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
258 align = 2 * BITS_PER_UNIT;
260 else if (TREE_CODE (exp) == LABEL_DECL)
262 else if (TREE_CODE (exp) == CONST_DECL)
264 /* The alignment of a CONST_DECL is determined by its initializer. */
265 exp = DECL_INITIAL (exp);
266 align = TYPE_ALIGN (TREE_TYPE (exp));
267 if (CONSTANT_CLASS_P (exp))
268 align = targetm.constant_alignment (exp, align);
270 known_alignment = true;
272 else if (DECL_P (exp))
274 align = DECL_ALIGN (exp);
275 known_alignment = true;
277 else if (TREE_CODE (exp) == INDIRECT_REF
278 || TREE_CODE (exp) == MEM_REF
279 || TREE_CODE (exp) == TARGET_MEM_REF)
281 tree addr = TREE_OPERAND (exp, 0);
282 unsigned ptr_align;
283 unsigned HOST_WIDE_INT ptr_bitpos;
284 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
 286       /* If the address is explicitly aligned, handle that.  */
287 if (TREE_CODE (addr) == BIT_AND_EXPR
288 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
290 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
291 ptr_bitmask *= BITS_PER_UNIT;
292 align = least_bit_hwi (ptr_bitmask);
293 addr = TREE_OPERAND (addr, 0);
296 known_alignment
297 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
298 align = MAX (ptr_align, align);
300 /* Re-apply explicit alignment to the bitpos. */
301 ptr_bitpos &= ptr_bitmask;
303 /* The alignment of the pointer operand in a TARGET_MEM_REF
304 has to take the variable offset parts into account. */
305 if (TREE_CODE (exp) == TARGET_MEM_REF)
307 if (TMR_INDEX (exp))
309 unsigned HOST_WIDE_INT step = 1;
310 if (TMR_STEP (exp))
311 step = TREE_INT_CST_LOW (TMR_STEP (exp));
312 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
314 if (TMR_INDEX2 (exp))
315 align = BITS_PER_UNIT;
316 known_alignment = false;
319 /* When EXP is an actual memory reference then we can use
320 TYPE_ALIGN of a pointer indirection to derive alignment.
321 Do so only if get_pointer_alignment_1 did not reveal absolute
322 alignment knowledge and if using that alignment would
323 improve the situation. */
324 unsigned int talign;
325 if (!addr_p && !known_alignment
326 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
327 && talign > align)
328 align = talign;
329 else
331 /* Else adjust bitpos accordingly. */
332 bitpos += ptr_bitpos;
333 if (TREE_CODE (exp) == MEM_REF
334 || TREE_CODE (exp) == TARGET_MEM_REF)
335 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
338 else if (TREE_CODE (exp) == STRING_CST)
340 /* STRING_CST are the only constant objects we allow to be not
341 wrapped inside a CONST_DECL. */
342 align = TYPE_ALIGN (TREE_TYPE (exp));
343 if (CONSTANT_CLASS_P (exp))
344 align = targetm.constant_alignment (exp, align);
346 known_alignment = true;
349 /* If there is a non-constant offset part extract the maximum
350 alignment that can prevail. */
351 if (offset)
353 unsigned int trailing_zeros = tree_ctz (offset);
354 if (trailing_zeros < HOST_BITS_PER_INT)
356 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
357 if (inner)
358 align = MIN (align, inner);
362 /* Account for the alignment of runtime coefficients, so that the constant
363 bitpos is guaranteed to be accurate. */
364 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
365 if (alt_align != 0 && alt_align < align)
367 align = alt_align;
368 known_alignment = false;
371 *alignp = align;
372 *bitposp = bitpos.coeffs[0] & (align - 1);
373 return known_alignment;
376 /* For a memory reference expression EXP compute values M and N such that M
377 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 378    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
 379    and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */
381 bool
382 get_object_alignment_1 (tree exp, unsigned int *alignp,
383 unsigned HOST_WIDE_INT *bitposp)
385 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
386 with it. */
387 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
388 exp = TREE_OPERAND (exp, 0);
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
392 /* Return the alignment in bits of EXP, an object. */
394 unsigned int
395 get_object_alignment (tree exp)
397 unsigned HOST_WIDE_INT bitpos = 0;
398 unsigned int align;
400 get_object_alignment_1 (exp, &align, &bitpos);
402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
405 if (bitpos != 0)
406 align = least_bit_hwi (bitpos);
407 return align;
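/* As a worked example: for a reference to a[1] where
     int a[4] __attribute__ ((aligned (16)));
   the underlying object is 128-bit aligned with a bit position of 32,
   so the function above returns least_bit_hwi (32) == 32 bits, i.e. only
   4-byte alignment can be guaranteed for that element.  */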
410 /* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
 412    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
413 the results are just a conservative approximation.
415 If EXP is not a pointer, false is returned too. */
417 bool
418 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INT *bitposp)
421 STRIP_NOPS (exp);
423 if (TREE_CODE (exp) == ADDR_EXPR)
424 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 alignp, bitposp, true);
426 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 unsigned int align;
429 unsigned HOST_WIDE_INT bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434 else
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 if (trailing_zeros < HOST_BITS_PER_INT)
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 if (inner)
441 align = MIN (align, inner);
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 unsigned int ptr_align, ptr_misalign;
452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 *bitposp = ptr_misalign * BITS_PER_UNIT;
457 *alignp = ptr_align * BITS_PER_UNIT;
458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT - 1);
462 /* We cannot really tell whether this result is an approximation. */
463 return false;
465 else
467 *bitposp = 0;
468 *alignp = BITS_PER_UNIT;
469 return false;
472 else if (TREE_CODE (exp) == INTEGER_CST)
474 *alignp = BIGGEST_ALIGNMENT;
475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
480 *bitposp = 0;
481 *alignp = BITS_PER_UNIT;
482 return false;
485 /* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
 487    EXP points to.  If EXP is not a pointer, BITS_PER_UNIT is returned.
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
492 unsigned int
493 get_pointer_alignment (tree exp)
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
498 get_pointer_alignment_1 (exp, &align, &bitpos);
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
503 if (bitpos != 0)
504 align = least_bit_hwi (bitpos);
506 return align;
509 /* Return the number of leading non-zero elements in the sequence
510 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
513 unsigned
514 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
516 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
518 unsigned n;
520 if (eltsize == 1)
522 /* Optimize the common case of plain char. */
523 for (n = 0; n < maxelts; n++)
525 const char *elt = (const char*) ptr + n;
526 if (!*elt)
527 break;
530 else
532 for (n = 0; n < maxelts; n++)
534 const char *elt = (const char*) ptr + n * eltsize;
535 if (!memcmp (elt, "\0\0\0\0", eltsize))
536 break;
539 return n;
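/* For instance, string_length ("hello\0world", 1, 11) returns 5, and with
   ELTSIZE == 2 the six bytes "a\0b\0\0\0" scanned with MAXELTS == 3 yield 2,
   since the third two-byte element is the terminating zero.  */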
542 /* Compute the length of a null-terminated character string or wide
543 character string handling character sizes of 1, 2, and 4 bytes.
544 TREE_STRING_LENGTH is not the right way because it evaluates to
545 the size of the character array in bytes (as opposed to characters)
546 and because it can contain a zero byte in the middle.
548 ONLY_VALUE should be nonzero if the result is not going to be emitted
549 into the instruction stream and zero if it is going to be expanded.
550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551 is returned, otherwise NULL, since
552 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
559 Additional information about the string accessed may be recorded
560 in DATA. For example, if ARG references an unterminated string,
561 then the declaration will be stored in the DECL field. If the
562 length of the unterminated string can be determined, it'll be
 563    stored in the LEN field.  Note this length could well be different
 564    from what a C strlen call would return.
566 ELTSIZE is 1 for normal single byte character strings, and 2 or
 567    4 for wide character strings.  ELTSIZE is by default 1.
569 The value returned is of type `ssizetype'. */
571 tree
572 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
 574   /* If we were not passed a DATA pointer, point it at a local
 575      structure.  That avoids having to check DATA for NULL each
 576      time we want to use it.  */
577 c_strlen_data local_strlen_data = { };
578 if (!data)
579 data = &local_strlen_data;
581 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
583 tree src = STRIP_NOPS (arg);
584 if (TREE_CODE (src) == COND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
587 tree len1, len2;
589 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
590 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
591 if (tree_int_cst_equal (len1, len2))
592 return len1;
595 if (TREE_CODE (src) == COMPOUND_EXPR
596 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
597 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
599 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
601 /* Offset from the beginning of the string in bytes. */
602 tree byteoff;
603 tree memsize;
604 tree decl;
605 src = string_constant (src, &byteoff, &memsize, &decl);
606 if (src == 0)
607 return NULL_TREE;
609 /* Determine the size of the string element. */
610 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
611 return NULL_TREE;
613 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
614 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
615 in case the latter is less than the size of the array, such as when
616 SRC refers to a short string literal used to initialize a large array.
617 In that case, the elements of the array after the terminating NUL are
618 all NUL. */
619 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
620 strelts = strelts / eltsize;
622 if (!tree_fits_uhwi_p (memsize))
623 return NULL_TREE;
625 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
627 /* PTR can point to the byte representation of any string type, including
628 char* and wchar_t*. */
629 const char *ptr = TREE_STRING_POINTER (src);
631 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
633 /* The code below works only for single byte character types. */
634 if (eltsize != 1)
635 return NULL_TREE;
637 /* If the string has an internal NUL character followed by any
638 non-NUL characters (e.g., "foo\0bar"), we can't compute
639 the offset to the following NUL if we don't know where to
640 start searching for it. */
641 unsigned len = string_length (ptr, eltsize, strelts);
643 /* Return when an embedded null character is found or none at all.
644 In the latter case, set the DECL/LEN field in the DATA structure
645 so that callers may examine them. */
646 if (len + 1 < strelts)
647 return NULL_TREE;
648 else if (len >= maxelts)
650 data->decl = decl;
651 data->off = byteoff;
652 data->minlen = ssize_int (len);
653 return NULL_TREE;
656 /* For empty strings the result should be zero. */
657 if (len == 0)
658 return ssize_int (0);
660 /* We don't know the starting offset, but we do know that the string
661 has no internal zero bytes. If the offset falls within the bounds
662 of the string subtract the offset from the length of the string,
663 and return that. Otherwise the length is zero. Take care to
664 use SAVE_EXPR in case the OFFSET has side-effects. */
665 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
666 : byteoff;
667 offsave = fold_convert_loc (loc, sizetype, offsave);
668 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
669 size_int (len));
670 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
671 offsave);
672 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
673 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
674 build_zero_cst (ssizetype));
677 /* Offset from the beginning of the string in elements. */
678 HOST_WIDE_INT eltoff;
680 /* We have a known offset into the string. Start searching there for
681 a null character if we can represent it as a single HOST_WIDE_INT. */
682 if (byteoff == 0)
683 eltoff = 0;
684 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
685 eltoff = -1;
686 else
687 eltoff = tree_to_uhwi (byteoff) / eltsize;
689 /* If the offset is known to be out of bounds, warn, and call strlen at
690 runtime. */
691 if (eltoff < 0 || eltoff >= maxelts)
693 /* Suppress multiple warnings for propagated constant strings. */
694 if (only_value != 2
695 && !warning_suppressed_p (arg, OPT_Warray_bounds)
696 && warning_at (loc, OPT_Warray_bounds,
697 "offset %qwi outside bounds of constant string",
698 eltoff))
700 if (decl)
701 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
702 suppress_warning (arg, OPT_Warray_bounds);
704 return NULL_TREE;
707 /* If eltoff is larger than strelts but less than maxelts the
708 string length is zero, since the excess memory will be zero. */
709 if (eltoff > strelts)
710 return ssize_int (0);
712 /* Use strlen to search for the first zero byte. Since any strings
713 constructed with build_string will have nulls appended, we win even
714 if we get handed something like (char[4])"abcd".
716 Since ELTOFF is our starting index into the string, no further
717 calculation is needed. */
718 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
719 strelts - eltoff);
721 /* Don't know what to return if there was no zero termination.
722 Ideally this would turn into a gcc_checking_assert over time.
723 Set DECL/LEN so callers can examine them. */
724 if (len >= maxelts - eltoff)
726 data->decl = decl;
727 data->off = byteoff;
728 data->minlen = ssize_int (len);
729 return NULL_TREE;
732 return ssize_int (len);
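/* Some concrete cases: c_strlen on &"hello"[2] folds to 3; on a string such
   as "foo\0bar" accessed at a non-constant offset it returns NULL_TREE,
   because the embedded NUL makes the length depend on where the access
   starts; and for an unterminated array the DECL/MINLEN members of DATA are
   filled in so callers can diagnose the missing terminator.  */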
735 /* Return a constant integer corresponding to target reading
736 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
737 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
738 are assumed to be zero, otherwise it reads as many characters
739 as needed. */
 741 rtx
 742 c_readstr (const char *str, scalar_int_mode mode,
743 bool null_terminated_p/*=true*/)
745 HOST_WIDE_INT ch;
746 unsigned int i, j;
747 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
749 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
750 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
751 / HOST_BITS_PER_WIDE_INT;
753 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
754 for (i = 0; i < len; i++)
755 tmp[i] = 0;
757 ch = 1;
758 for (i = 0; i < GET_MODE_SIZE (mode); i++)
760 j = i;
761 if (WORDS_BIG_ENDIAN)
762 j = GET_MODE_SIZE (mode) - i - 1;
763 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
764 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
765 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
766 j *= BITS_PER_UNIT;
768 if (ch || !null_terminated_p)
769 ch = (unsigned char) str[i];
770 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
773 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
774 return immed_wide_int_const (c, mode);
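/* For example, assuming BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN,
   c_readstr ("ab", SImode) produces the constant 0x00006261 on a
   little-endian target ('a' == 0x61 in the low byte, zeros after the
   terminating NUL) and 0x61620000 on a big-endian one.  */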
777 /* Cast a target constant CST to target CHAR and if that value fits into
778 host char type, return zero and put that value into variable pointed to by
779 P. */
781 static int
782 target_char_cast (tree cst, char *p)
784 unsigned HOST_WIDE_INT val, hostval;
786 if (TREE_CODE (cst) != INTEGER_CST
787 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
788 return 1;
790 /* Do not care if it fits or not right here. */
791 val = TREE_INT_CST_LOW (cst);
793 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
794 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
796 hostval = val;
797 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
798 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
800 if (val != hostval)
801 return 1;
803 *p = hostval;
804 return 0;
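/* E.g. for a target whose CHAR_TYPE_SIZE is 16 while the host char has
   8 bits, the constant 0x141 does not fit and the function returns 1,
   whereas the constant 65 is stored into *P and 0 is returned.  */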
807 /* Similar to save_expr, but assumes that arbitrary code is not executed
808 in between the multiple evaluations. In particular, we assume that a
809 non-addressable local variable will not be modified. */
811 static tree
812 builtin_save_expr (tree exp)
814 if (TREE_CODE (exp) == SSA_NAME
815 || (TREE_ADDRESSABLE (exp) == 0
816 && (TREE_CODE (exp) == PARM_DECL
817 || (VAR_P (exp) && !TREE_STATIC (exp)))))
818 return exp;
820 return save_expr (exp);
823 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
824 times to get the address of either a higher stack frame, or a return
825 address located within it (depending on FNDECL_CODE). */
827 static rtx
828 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
830 int i;
831 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
832 if (tem == NULL_RTX)
834 /* For a zero count with __builtin_return_address, we don't care what
835 frame address we return, because target-specific definitions will
836 override us. Therefore frame pointer elimination is OK, and using
837 the soft frame pointer is OK.
839 For a nonzero count, or a zero count with __builtin_frame_address,
840 we require a stable offset from the current frame pointer to the
841 previous one, so we must use the hard frame pointer, and
842 we must disable frame pointer elimination. */
843 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
844 tem = frame_pointer_rtx;
845 else
847 tem = hard_frame_pointer_rtx;
849 /* Tell reload not to eliminate the frame pointer. */
850 crtl->accesses_prior_frames = 1;
854 if (count > 0)
855 SETUP_FRAME_ADDRESSES ();
857 /* On the SPARC, the return address is not in the frame, it is in a
858 register. There is no way to access it off of the current frame
859 pointer, but it can be accessed off the previous frame pointer by
860 reading the value from the register window save area. */
861 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
862 count--;
864 /* Scan back COUNT frames to the specified frame. */
865 for (i = 0; i < count; i++)
867 /* Assume the dynamic chain pointer is in the word that the
868 frame address points to, unless otherwise specified. */
869 tem = DYNAMIC_CHAIN_ADDRESS (tem);
870 tem = memory_address (Pmode, tem);
871 tem = gen_frame_mem (Pmode, tem);
872 tem = copy_to_reg (tem);
875 /* For __builtin_frame_address, return what we've got. But, on
876 the SPARC for example, we may have to add a bias. */
877 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
878 return FRAME_ADDR_RTX (tem);
880 /* For __builtin_return_address, get the return address from that frame. */
881 #ifdef RETURN_ADDR_RTX
882 tem = RETURN_ADDR_RTX (count, tem);
883 #else
884 tem = memory_address (Pmode,
885 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
886 tem = gen_frame_mem (Pmode, tem);
887 #endif
888 return tem;
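/* In source terms, __builtin_return_address (0) reads the return address of
   the current frame, while __builtin_frame_address (1) walks one link up the
   dynamic chain and therefore forces use of the hard frame pointer, as
   described above.  */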
891 /* Alias set used for setjmp buffer. */
892 static alias_set_type setjmp_alias_set = -1;
894 /* Construct the leading half of a __builtin_setjmp call. Control will
895 return to RECEIVER_LABEL. This is also called directly by the SJLJ
896 exception handling code. */
898 void
899 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
901 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
902 rtx stack_save;
903 rtx mem;
905 if (setjmp_alias_set == -1)
906 setjmp_alias_set = new_alias_set ();
908 buf_addr = convert_memory_address (Pmode, buf_addr);
910 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
912 /* We store the frame pointer and the address of receiver_label in
913 the buffer and use the rest of it for the stack save area, which
914 is machine-dependent. */
916 mem = gen_rtx_MEM (Pmode, buf_addr);
917 set_mem_alias_set (mem, setjmp_alias_set);
918 emit_move_insn (mem, hard_frame_pointer_rtx);
920 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
921 GET_MODE_SIZE (Pmode))),
922 set_mem_alias_set (mem, setjmp_alias_set);
924 emit_move_insn (validize_mem (mem),
925 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
927 stack_save = gen_rtx_MEM (sa_mode,
928 plus_constant (Pmode, buf_addr,
929 2 * GET_MODE_SIZE (Pmode)));
930 set_mem_alias_set (stack_save, setjmp_alias_set);
931 emit_stack_save (SAVE_NONLOCAL, &stack_save);
933 /* If there is further processing to do, do it. */
934 if (targetm.have_builtin_setjmp_setup ())
935 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
937 /* We have a nonlocal label. */
938 cfun->has_nonlocal_label = 1;
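/* The resulting buffer layout is therefore: word 0 holds the hard frame
   pointer, word 1 the address of RECEIVER_LABEL, and the remaining words
   (starting at offset 2 * GET_MODE_SIZE (Pmode)) form the machine-dependent
   stack save area written by emit_stack_save above.  */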
941 /* Construct the trailing part of a __builtin_setjmp call. This is
942 also called directly by the SJLJ exception handling code.
 943    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
945 void
946 expand_builtin_setjmp_receiver (rtx receiver_label)
948 rtx chain;
950 /* Mark the FP as used when we get here, so we have to make sure it's
951 marked as used by this function. */
952 emit_use (hard_frame_pointer_rtx);
954 /* Mark the static chain as clobbered here so life information
955 doesn't get messed up for it. */
956 chain = rtx_for_static_chain (current_function_decl, true);
957 if (chain && REG_P (chain))
958 emit_clobber (chain);
960 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
962 /* If the argument pointer can be eliminated in favor of the
963 frame pointer, we don't need to restore it. We assume here
964 that if such an elimination is present, it can always be used.
965 This is the case on all known machines; if we don't make this
966 assumption, we do unnecessary saving on many machines. */
967 size_t i;
968 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
970 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
971 if (elim_regs[i].from == ARG_POINTER_REGNUM
972 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
973 break;
975 if (i == ARRAY_SIZE (elim_regs))
977 /* Now restore our arg pointer from the address at which it
978 was saved in our stack frame. */
979 emit_move_insn (crtl->args.internal_arg_pointer,
980 copy_to_reg (get_arg_pointer_save_area ()));
984 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
985 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
986 else if (targetm.have_nonlocal_goto_receiver ())
987 emit_insn (targetm.gen_nonlocal_goto_receiver ());
988 else
989 { /* Nothing */ }
991 /* We must not allow the code we just generated to be reordered by
992 scheduling. Specifically, the update of the frame pointer must
993 happen immediately, not later. */
994 emit_insn (gen_blockage ());
997 /* __builtin_longjmp is passed a pointer to an array of five words (not
998 all will be used on all machines). It operates similarly to the C
999 library function of the same name, but is more efficient. Much of
1000 the code below is copied from the handling of non-local gotos. */
1002 static void
1003 expand_builtin_longjmp (rtx buf_addr, rtx value)
1005 rtx fp, lab, stack;
1006 rtx_insn *insn, *last;
1007 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1009 /* DRAP is needed for stack realign if longjmp is expanded to current
1010 function */
1011 if (SUPPORTS_STACK_ALIGNMENT)
1012 crtl->need_drap = true;
1014 if (setjmp_alias_set == -1)
1015 setjmp_alias_set = new_alias_set ();
1017 buf_addr = convert_memory_address (Pmode, buf_addr);
1019 buf_addr = force_reg (Pmode, buf_addr);
 1021   /* We require the user to pass a second argument of 1, because
 1022      that is what builtin_setjmp will return.  */
1023 gcc_assert (value == const1_rtx);
1025 last = get_last_insn ();
1026 if (targetm.have_builtin_longjmp ())
1027 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1028 else
1030 fp = gen_rtx_MEM (Pmode, buf_addr);
1031 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1032 GET_MODE_SIZE (Pmode)));
1034 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1035 2 * GET_MODE_SIZE (Pmode)));
1036 set_mem_alias_set (fp, setjmp_alias_set);
1037 set_mem_alias_set (lab, setjmp_alias_set);
1038 set_mem_alias_set (stack, setjmp_alias_set);
1040 /* Pick up FP, label, and SP from the block and jump. This code is
1041 from expand_goto in stmt.c; see there for detailed comments. */
1042 if (targetm.have_nonlocal_goto ())
1043 /* We have to pass a value to the nonlocal_goto pattern that will
1044 get copied into the static_chain pointer, but it does not matter
1045 what that value is, because builtin_setjmp does not use it. */
1046 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1047 else
1049 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1050 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1052 lab = copy_to_reg (lab);
1054 /* Restore the frame pointer and stack pointer. We must use a
1055 temporary since the setjmp buffer may be a local. */
1056 fp = copy_to_reg (fp);
1057 emit_stack_restore (SAVE_NONLOCAL, stack);
1059 /* Ensure the frame pointer move is not optimized. */
1060 emit_insn (gen_blockage ());
1061 emit_clobber (hard_frame_pointer_rtx);
1062 emit_clobber (frame_pointer_rtx);
1063 emit_move_insn (hard_frame_pointer_rtx, fp);
1065 emit_use (hard_frame_pointer_rtx);
1066 emit_use (stack_pointer_rtx);
1067 emit_indirect_jump (lab);
1071 /* Search backwards and mark the jump insn as a non-local goto.
1072 Note that this precludes the use of __builtin_longjmp to a
1073 __builtin_setjmp target in the same function. However, we've
1074 already cautioned the user that these functions are for
1075 internal exception handling use only. */
1076 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1078 gcc_assert (insn != last);
1080 if (JUMP_P (insn))
1082 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1083 break;
1085 else if (CALL_P (insn))
1086 break;
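/* A typical (EH-internal) pairing of the two builtins looks roughly like

     if (__builtin_setjmp (buf) == 0)
       ... normal path ...
     else
       ... control resumes here after __builtin_longjmp (buf, 1) ...

   where BUF is the five-word buffer described above and the second longjmp
   argument must be the literal 1, as asserted in this function.  */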
1090 static inline bool
1091 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1093 return (iter->i < iter->n);
1096 /* This function validates the types of a function call argument list
1097 against a specified list of tree_codes. If the last specifier is a 0,
1098 that represents an ellipsis, otherwise the last specifier must be a
1099 VOID_TYPE. */
1101 static bool
1102 validate_arglist (const_tree callexpr, ...)
1104 enum tree_code code;
 1105   bool res = false;
1106 va_list ap;
1107 const_call_expr_arg_iterator iter;
1108 const_tree arg;
1110 va_start (ap, callexpr);
1111 init_const_call_expr_arg_iterator (callexpr, &iter);
1113 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1114 tree fn = CALL_EXPR_FN (callexpr);
1115 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1117 for (unsigned argno = 1; ; ++argno)
1119 code = (enum tree_code) va_arg (ap, int);
1121 switch (code)
1123 case 0:
 1124 	  /* This signifies an ellipsis; any further arguments are all ok.  */
1125 res = true;
1126 goto end;
1127 case VOID_TYPE:
1128 /* This signifies an endlink, if no arguments remain, return
1129 true, otherwise return false. */
1130 res = !more_const_call_expr_args_p (&iter);
1131 goto end;
1132 case POINTER_TYPE:
1133 /* The actual argument must be nonnull when either the whole
1134 called function has been declared nonnull, or when the formal
1135 argument corresponding to the actual argument has been. */
1136 if (argmap
1137 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1139 arg = next_const_call_expr_arg (&iter);
1140 if (!validate_arg (arg, code) || integer_zerop (arg))
1141 goto end;
1142 break;
1144 /* FALLTHRU */
1145 default:
1146 /* If no parameters remain or the parameter's code does not
1147 match the specified code, return false. Otherwise continue
1148 checking any remaining arguments. */
1149 arg = next_const_call_expr_arg (&iter);
1150 if (!validate_arg (arg, code))
1151 goto end;
1152 break;
1156 /* We need gotos here since we can only have one VA_CLOSE in a
1157 function. */
1158 end: ;
1159 va_end (ap);
1161 BITMAP_FREE (argmap);
1163 return res;
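/* For instance, expand_builtin_nonlocal_goto below uses
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE) to insist on
   exactly two pointer arguments, whereas expand_builtin_prefetch uses
   validate_arglist (exp, POINTER_TYPE, 0), where the trailing 0 allows any
   further arguments.  */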
1166 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1167 and the address of the save area. */
1169 static rtx
1170 expand_builtin_nonlocal_goto (tree exp)
1172 tree t_label, t_save_area;
1173 rtx r_label, r_save_area, r_fp, r_sp;
1174 rtx_insn *insn;
1176 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1177 return NULL_RTX;
1179 t_label = CALL_EXPR_ARG (exp, 0);
1180 t_save_area = CALL_EXPR_ARG (exp, 1);
1182 r_label = expand_normal (t_label);
1183 r_label = convert_memory_address (Pmode, r_label);
1184 r_save_area = expand_normal (t_save_area);
1185 r_save_area = convert_memory_address (Pmode, r_save_area);
1186 /* Copy the address of the save location to a register just in case it was
1187 based on the frame pointer. */
1188 r_save_area = copy_to_reg (r_save_area);
1189 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1190 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1191 plus_constant (Pmode, r_save_area,
1192 GET_MODE_SIZE (Pmode)));
1194 crtl->has_nonlocal_goto = 1;
1196 /* ??? We no longer need to pass the static chain value, afaik. */
1197 if (targetm.have_nonlocal_goto ())
1198 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1199 else
1201 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1202 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1204 r_label = copy_to_reg (r_label);
1206 /* Restore the frame pointer and stack pointer. We must use a
1207 temporary since the setjmp buffer may be a local. */
1208 r_fp = copy_to_reg (r_fp);
1209 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1211 /* Ensure the frame pointer move is not optimized. */
1212 emit_insn (gen_blockage ());
1213 emit_clobber (hard_frame_pointer_rtx);
1214 emit_clobber (frame_pointer_rtx);
1215 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1217 /* USE of hard_frame_pointer_rtx added for consistency;
1218 not clear if really needed. */
1219 emit_use (hard_frame_pointer_rtx);
1220 emit_use (stack_pointer_rtx);
1222 /* If the architecture is using a GP register, we must
1223 conservatively assume that the target function makes use of it.
1224 The prologue of functions with nonlocal gotos must therefore
1225 initialize the GP register to the appropriate value, and we
1226 must then make sure that this value is live at the point
1227 of the jump. (Note that this doesn't necessarily apply
1228 to targets with a nonlocal_goto pattern; they are free
1229 to implement it in their own way. Note also that this is
1230 a no-op if the GP register is a global invariant.) */
1231 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1232 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1233 emit_use (pic_offset_table_rtx);
1235 emit_indirect_jump (r_label);
1238 /* Search backwards to the jump insn and mark it as a
1239 non-local goto. */
1240 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1242 if (JUMP_P (insn))
1244 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1245 break;
1247 else if (CALL_P (insn))
1248 break;
1251 return const0_rtx;
1254 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1255 (not all will be used on all machines) that was passed to __builtin_setjmp.
1256 It updates the stack pointer in that block to the current value. This is
1257 also called directly by the SJLJ exception handling code. */
1259 void
1260 expand_builtin_update_setjmp_buf (rtx buf_addr)
1262 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1263 buf_addr = convert_memory_address (Pmode, buf_addr);
1264 rtx stack_save
1265 = gen_rtx_MEM (sa_mode,
1266 memory_address
1267 (sa_mode,
1268 plus_constant (Pmode, buf_addr,
1269 2 * GET_MODE_SIZE (Pmode))));
1271 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1274 /* Expand a call to __builtin_prefetch. For a target that does not support
1275 data prefetch, evaluate the memory address argument in case it has side
1276 effects. */
1278 static void
1279 expand_builtin_prefetch (tree exp)
1281 tree arg0, arg1, arg2;
1282 int nargs;
1283 rtx op0, op1, op2;
1285 if (!validate_arglist (exp, POINTER_TYPE, 0))
1286 return;
1288 arg0 = CALL_EXPR_ARG (exp, 0);
1290 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1291 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1292 locality). */
1293 nargs = call_expr_nargs (exp);
1294 if (nargs > 1)
1295 arg1 = CALL_EXPR_ARG (exp, 1);
1296 else
1297 arg1 = integer_zero_node;
1298 if (nargs > 2)
1299 arg2 = CALL_EXPR_ARG (exp, 2);
1300 else
1301 arg2 = integer_three_node;
1303 /* Argument 0 is an address. */
1304 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1306 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1307 if (TREE_CODE (arg1) != INTEGER_CST)
1309 error ("second argument to %<__builtin_prefetch%> must be a constant");
1310 arg1 = integer_zero_node;
1312 op1 = expand_normal (arg1);
1313 /* Argument 1 must be either zero or one. */
1314 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1316 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1317 " using zero");
1318 op1 = const0_rtx;
1321 /* Argument 2 (locality) must be a compile-time constant int. */
1322 if (TREE_CODE (arg2) != INTEGER_CST)
1324 error ("third argument to %<__builtin_prefetch%> must be a constant");
1325 arg2 = integer_zero_node;
1327 op2 = expand_normal (arg2);
1328 /* Argument 2 must be 0, 1, 2, or 3. */
1329 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1331 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1332 op2 = const0_rtx;
1335 if (targetm.have_prefetch ())
1337 class expand_operand ops[3];
1339 create_address_operand (&ops[0], op0);
1340 create_integer_operand (&ops[1], INTVAL (op1));
1341 create_integer_operand (&ops[2], INTVAL (op2));
1342 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1343 return;
1346 /* Don't do anything with direct references to volatile memory, but
1347 generate code to handle other side effects. */
1348 if (!MEM_P (op0) && side_effects_p (op0))
1349 emit_insn (op0);
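/* In source terms, __builtin_prefetch (p, 1, 3) requests a write prefetch
   with maximum temporal locality, and plain __builtin_prefetch (p) gets the
   defaults applied above: read access (0) and locality 3.  */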
1352 /* Get a MEM rtx for expression EXP which is the address of an operand
1353 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1354 the maximum length of the block of memory that might be accessed or
1355 NULL if unknown. */
 1357 static rtx
 1358 get_memory_rtx (tree exp, tree len)
1360 tree orig_exp = exp;
1361 rtx addr, mem;
 1363   /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1364      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1365 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1366 exp = TREE_OPERAND (exp, 0);
1368 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1369 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1371 /* Get an expression we can use to find the attributes to assign to MEM.
1372 First remove any nops. */
1373 while (CONVERT_EXPR_P (exp)
1374 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1375 exp = TREE_OPERAND (exp, 0);
1377 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1378 (as builtin stringops may alias with anything). */
1379 exp = fold_build2 (MEM_REF,
1380 build_array_type (char_type_node,
1381 build_range_type (sizetype,
1382 size_one_node, len)),
1383 exp, build_int_cst (ptr_type_node, 0));
1385 /* If the MEM_REF has no acceptable address, try to get the base object
1386 from the original address we got, and build an all-aliasing
1387 unknown-sized access to that one. */
1388 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1389 set_mem_attributes (mem, exp, 0);
1390 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1391 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1392 0))))
1394 exp = build_fold_addr_expr (exp);
1395 exp = fold_build2 (MEM_REF,
1396 build_array_type (char_type_node,
1397 build_range_type (sizetype,
1398 size_zero_node,
1399 NULL)),
1400 exp, build_int_cst (ptr_type_node, 0));
1401 set_mem_attributes (mem, exp, 0);
1403 set_mem_alias_set (mem, 0);
1404 return mem;
1407 /* Built-in functions to perform an untyped call and return. */
1409 #define apply_args_mode \
1410 (this_target_builtins->x_apply_args_mode)
1411 #define apply_result_mode \
1412 (this_target_builtins->x_apply_result_mode)
1414 /* Return the size required for the block returned by __builtin_apply_args,
1415 and initialize apply_args_mode. */
1417 static int
1418 apply_args_size (void)
1420 static int size = -1;
1421 int align;
1422 unsigned int regno;
1424 /* The values computed by this function never change. */
1425 if (size < 0)
1427 /* The first value is the incoming arg-pointer. */
1428 size = GET_MODE_SIZE (Pmode);
1430 /* The second value is the structure value address unless this is
1431 passed as an "invisible" first argument. */
1432 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1433 size += GET_MODE_SIZE (Pmode);
1435 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1436 if (FUNCTION_ARG_REGNO_P (regno))
1438 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1440 gcc_assert (mode != VOIDmode);
1442 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1443 if (size % align != 0)
1444 size = CEIL (size, align) * align;
1445 size += GET_MODE_SIZE (mode);
1446 apply_args_mode[regno] = mode;
1448 else
1450 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1453 return size;
1456 /* Return the size required for the block returned by __builtin_apply,
1457 and initialize apply_result_mode. */
1459 static int
1460 apply_result_size (void)
1462 static int size = -1;
1463 int align, regno;
1465 /* The values computed by this function never change. */
1466 if (size < 0)
1468 size = 0;
1470 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1471 if (targetm.calls.function_value_regno_p (regno))
1473 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1475 gcc_assert (mode != VOIDmode);
1477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1478 if (size % align != 0)
1479 size = CEIL (size, align) * align;
1480 size += GET_MODE_SIZE (mode);
1481 apply_result_mode[regno] = mode;
1483 else
1484 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1486 /* Allow targets that use untyped_call and untyped_return to override
1487 the size so that machine-specific information can be stored here. */
1488 #ifdef APPLY_RESULT_SIZE
1489 size = APPLY_RESULT_SIZE;
1490 #endif
1492 return size;
1495 /* Create a vector describing the result block RESULT. If SAVEP is true,
1496 the result block is used to save the values; otherwise it is used to
1497 restore the values. */
1499 static rtx
1500 result_vector (int savep, rtx result)
1502 int regno, size, align, nelts;
1503 fixed_size_mode mode;
1504 rtx reg, mem;
1505 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1507 size = nelts = 0;
1508 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1509 if ((mode = apply_result_mode[regno]) != VOIDmode)
1511 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1512 if (size % align != 0)
1513 size = CEIL (size, align) * align;
1514 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1515 mem = adjust_address (result, mode, size);
1516 savevec[nelts++] = (savep
1517 ? gen_rtx_SET (mem, reg)
1518 : gen_rtx_SET (reg, mem));
1519 size += GET_MODE_SIZE (mode);
1521 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1524 /* Save the state required to perform an untyped call with the same
1525 arguments as were passed to the current function. */
1527 static rtx
1528 expand_builtin_apply_args_1 (void)
1530 rtx registers, tem;
1531 int size, align, regno;
1532 fixed_size_mode mode;
1533 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1535 /* Create a block where the arg-pointer, structure value address,
1536 and argument registers can be saved. */
1537 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1539 /* Walk past the arg-pointer and structure value address. */
1540 size = GET_MODE_SIZE (Pmode);
1541 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1542 size += GET_MODE_SIZE (Pmode);
1544 /* Save each register used in calling a function to the block. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546 if ((mode = apply_args_mode[regno]) != VOIDmode)
1548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 if (size % align != 0)
1550 size = CEIL (size, align) * align;
1552 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1554 emit_move_insn (adjust_address (registers, mode, size), tem);
1555 size += GET_MODE_SIZE (mode);
1558 /* Save the arg pointer to the block. */
1559 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1560   /* We need the pointer as the caller actually passed the arguments to
 1561      us, not as we might have pretended they were passed.  Make sure it's
 1562      a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
 1563   if (STACK_GROWS_DOWNWARD)
 1564     tem
 1565       = force_operand (plus_constant (Pmode, tem,
1566 crtl->args.pretend_args_size),
1567 NULL_RTX);
1568 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1570 size = GET_MODE_SIZE (Pmode);
1572 /* Save the structure value address unless this is passed as an
1573 "invisible" first argument. */
1574 if (struct_incoming_value)
1575 emit_move_insn (adjust_address (registers, Pmode, size),
1576 copy_to_reg (struct_incoming_value));
1578 /* Return the address of the block. */
1579 return copy_addr_to_reg (XEXP (registers, 0));
 1582 /* __builtin_apply_args returns a block of memory allocated on
1583 the stack into which is stored the arg pointer, structure
1584 value address, static chain, and all the registers that might
1585 possibly be used in performing a function call. The code is
1586 moved to the start of the function so the incoming values are
1587 saved. */
1589 static rtx
1590 expand_builtin_apply_args (void)
1592 /* Don't do __builtin_apply_args more than once in a function.
1593 Save the result of the first call and reuse it. */
1594 if (apply_args_value != 0)
1595 return apply_args_value;
1597 /* When this function is called, it means that registers must be
1598 saved on entry to this function. So we migrate the
1599 call to the first insn of this function. */
1600 rtx temp;
1602 start_sequence ();
1603 temp = expand_builtin_apply_args_1 ();
1604 rtx_insn *seq = get_insns ();
1605 end_sequence ();
1607 apply_args_value = temp;
1609 /* Put the insns after the NOTE that starts the function.
1610 If this is inside a start_sequence, make the outer-level insn
1611 chain current, so the code is placed at the start of the
1612 function. If internal_arg_pointer is a non-virtual pseudo,
1613 it needs to be placed after the function that initializes
1614 that pseudo. */
1615 push_topmost_sequence ();
1616 if (REG_P (crtl->args.internal_arg_pointer)
1617 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1618 emit_insn_before (seq, parm_birth_insn);
1619 else
1620 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1621 pop_topmost_sequence ();
1622 return temp;
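/* The saved block is consumed by __builtin_apply; a forwarding wrapper
   typically looks something like

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   where TARGET_FN stands for whatever function is being forwarded to and 64
   is a caller-chosen upper bound on the size of the argument block.  */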
1626 /* Perform an untyped call and save the state required to perform an
1627 untyped return of whatever value was returned by the given function. */
1629 static rtx
1630 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1632 int size, align, regno;
1633 fixed_size_mode mode;
1634 rtx incoming_args, result, reg, dest, src;
1635 rtx_call_insn *call_insn;
1636 rtx old_stack_level = 0;
1637 rtx call_fusage = 0;
1638 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1640 arguments = convert_memory_address (Pmode, arguments);
1642 /* Create a block where the return registers can be saved. */
1643 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1645 /* Fetch the arg pointer from the ARGUMENTS block. */
1646 incoming_args = gen_reg_rtx (Pmode);
1647 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1648 if (!STACK_GROWS_DOWNWARD)
1649 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1650 incoming_args, 0, OPTAB_LIB_WIDEN);
1652 /* Push a new argument block and copy the arguments. Do not allow
1653 the (potential) memcpy call below to interfere with our stack
1654 manipulations. */
1655 do_pending_stack_adjust ();
1656 NO_DEFER_POP;
1658 /* Save the stack with nonlocal if available. */
1659 if (targetm.have_save_stack_nonlocal ())
1660 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1661 else
1662 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1664 /* Allocate a block of memory onto the stack and copy the memory
1665 arguments to the outgoing arguments address. We can pass TRUE
1666 as the 4th argument because we just saved the stack pointer
1667 and will restore it right after the call. */
1668 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1670 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1671 may have already set current_function_calls_alloca to true.
1672 current_function_calls_alloca won't be set if argsize is zero,
1673 so we have to guarantee need_drap is true here. */
1674 if (SUPPORTS_STACK_ALIGNMENT)
1675 crtl->need_drap = true;
1677 dest = virtual_outgoing_args_rtx;
1678 if (!STACK_GROWS_DOWNWARD)
1680 if (CONST_INT_P (argsize))
1681 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1682 else
1683 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1685 dest = gen_rtx_MEM (BLKmode, dest);
1686 set_mem_align (dest, PARM_BOUNDARY);
1687 src = gen_rtx_MEM (BLKmode, incoming_args);
1688 set_mem_align (src, PARM_BOUNDARY);
1689 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1691 /* Refer to the argument block. */
1692 apply_args_size ();
1693 arguments = gen_rtx_MEM (BLKmode, arguments);
1694 set_mem_align (arguments, PARM_BOUNDARY);
1696 /* Walk past the arg-pointer and structure value address. */
1697 size = GET_MODE_SIZE (Pmode);
1698 if (struct_value)
1699 size += GET_MODE_SIZE (Pmode);
1701 /* Restore each of the registers previously saved. Make USE insns
1702 for each of these registers for use in making the call. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704 if ((mode = apply_args_mode[regno]) != VOIDmode)
1706 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1707 if (size % align != 0)
1708 size = CEIL (size, align) * align;
1709 reg = gen_rtx_REG (mode, regno);
1710 emit_move_insn (reg, adjust_address (arguments, mode, size));
1711 use_reg (&call_fusage, reg);
1712 size += GET_MODE_SIZE (mode);
1715 /* Restore the structure value address unless this is passed as an
1716 "invisible" first argument. */
1717 size = GET_MODE_SIZE (Pmode);
1718 if (struct_value)
1720 rtx value = gen_reg_rtx (Pmode);
1721 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1722 emit_move_insn (struct_value, value);
1723 if (REG_P (struct_value))
1724 use_reg (&call_fusage, struct_value);
1727 /* All arguments and registers used for the call are set up by now! */
1728 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1730 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1731 and we don't want to load it into a register as an optimization,
1732 because prepare_call_address already did it if it should be done. */
1733 if (GET_CODE (function) != SYMBOL_REF)
1734 function = memory_address (FUNCTION_MODE, function);
1736 /* Generate the actual call instruction and save the return value. */
1737 if (targetm.have_untyped_call ())
1739 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1740 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1741 result_vector (1, result));
1742 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1743 if (CALL_P (insn))
1744 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1745 emit_insn (seq);
1747 else if (targetm.have_call_value ())
1749 rtx valreg = 0;
1751 /* Locate the unique return register. It is not possible to
1752 express a call that sets more than one return register using
1753 call_value; use untyped_call for that. In fact, untyped_call
1754 only needs to save the return registers in the given block. */
1755 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1756 if ((mode = apply_result_mode[regno]) != VOIDmode)
1758 gcc_assert (!valreg); /* have_untyped_call required. */
1760 valreg = gen_rtx_REG (mode, regno);
1763 emit_insn (targetm.gen_call_value (valreg,
1764 gen_rtx_MEM (FUNCTION_MODE, function),
1765 const0_rtx, NULL_RTX, const0_rtx));
1767 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1769 else
1770 gcc_unreachable ();
1772 /* Find the CALL insn we just emitted, and attach the register usage
1773 information. */
1774 call_insn = last_call_insn ();
1775 add_function_usage_to (call_insn, call_fusage);
1777 /* Restore the stack. */
1778 if (targetm.have_save_stack_nonlocal ())
1779 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1780 else
1781 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1782 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1784 OK_DEFER_POP;
1786 /* Return the address of the result block. */
1787 result = copy_addr_to_reg (XEXP (result, 0));
1788 return convert_memory_address (ptr_mode, result);
1791 /* Perform an untyped return. */
1793 static void
1794 expand_builtin_return (rtx result)
1796 int size, align, regno;
1797 fixed_size_mode mode;
1798 rtx reg;
1799 rtx_insn *call_fusage = 0;
1801 result = convert_memory_address (Pmode, result);
1803 apply_result_size ();
1804 result = gen_rtx_MEM (BLKmode, result);
1806 if (targetm.have_untyped_return ())
1808 rtx vector = result_vector (0, result);
1809 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1810 emit_barrier ();
1811 return;
1814 /* Restore the return value and note that each value is used. */
1815 size = 0;
1816 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1817 if ((mode = apply_result_mode[regno]) != VOIDmode)
1819 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1820 if (size % align != 0)
1821 size = CEIL (size, align) * align;
1822 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1823 emit_move_insn (reg, adjust_address (result, mode, size));
1825 push_to_sequence (call_fusage);
1826 emit_use (reg);
1827 call_fusage = get_insns ();
1828 end_sequence ();
1829 size += GET_MODE_SIZE (mode);
1832 /* Put the USE insns before the return. */
1833 emit_insn (call_fusage);
1835 /* Return whatever values were restored by jumping directly to the end
1836 of the function. */
1837 expand_naked_return ();
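/* Illustrative sketch, not part of the expanders above: at the source
   level the three builtins are normally used together to forward a call
   whose arguments are unknown to the forwarder, e.g.

       extern void target_fn (int, double);

       void forwarder (int i, double d)
       {
         void *args = __builtin_apply_args ();               // save incoming regs
         void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (res);                              // propagate result
       }

   The 64-byte stack-argument size is only an assumption for the example.
   The expanders above emit the register save and restore blocks that make
   this pattern work.  */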
1840 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1842 static enum type_class
1843 type_to_class (tree type)
1845 switch (TREE_CODE (type))
1847 case VOID_TYPE: return void_type_class;
1848 case INTEGER_TYPE: return integer_type_class;
1849 case ENUMERAL_TYPE: return enumeral_type_class;
1850 case BOOLEAN_TYPE: return boolean_type_class;
1851 case POINTER_TYPE: return pointer_type_class;
1852 case REFERENCE_TYPE: return reference_type_class;
1853 case OFFSET_TYPE: return offset_type_class;
1854 case REAL_TYPE: return real_type_class;
1855 case COMPLEX_TYPE: return complex_type_class;
1856 case FUNCTION_TYPE: return function_type_class;
1857 case METHOD_TYPE: return method_type_class;
1858 case RECORD_TYPE: return record_type_class;
1859 case UNION_TYPE:
1860 case QUAL_UNION_TYPE: return union_type_class;
1861 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1862 ? string_type_class : array_type_class);
1863 case LANG_TYPE: return lang_type_class;
1864 case OPAQUE_TYPE: return opaque_type_class;
1865 default: return no_type_class;
1869 /* Expand a call EXP to __builtin_classify_type. */
1871 static rtx
1872 expand_builtin_classify_type (tree exp)
1874 if (call_expr_nargs (exp))
1875 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1876 return GEN_INT (no_type_class);
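/* Illustrative sketch: at the source level __builtin_classify_type yields
   a constant from the type_class enumeration handled by type_to_class
   above, e.g.

       int i;  double d;  int *p;

       int ci = __builtin_classify_type (i);   // integer_type_class
       int cd = __builtin_classify_type (d);   // real_type_class
       int cp = __builtin_classify_type (p);   // pointer_type_class
*/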
1879 /* This helper macro, meant to be used in mathfn_built_in below, determines
1880 which among a set of builtin math functions is appropriate for a given type
1881 mode. The `F' (float) and `L' (long double) variants are automatically
1882 generated from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1883 types, there are additional types that are considered with 'F32', 'F64',
1884 'F128', etc. suffixes. */
1885 #define CASE_MATHFN(MATHFN) \
1886 CASE_CFN_##MATHFN: \
1887 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1888 fcodel = BUILT_IN_##MATHFN##L ; break;
1889 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1890 types. */
1891 #define CASE_MATHFN_FLOATN(MATHFN) \
1892 CASE_CFN_##MATHFN: \
1893 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1894 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1895 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1896 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1897 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1898 break;
1899 /* Similar to above, but appends _R after any F/L suffix. */
1900 #define CASE_MATHFN_REENT(MATHFN) \
1901 case CFN_BUILT_IN_##MATHFN##_R: \
1902 case CFN_BUILT_IN_##MATHFN##F_R: \
1903 case CFN_BUILT_IN_##MATHFN##L_R: \
1904 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1905 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1907 /* Return a function equivalent to FN but operating on floating-point
1908 values of type TYPE, or END_BUILTINS if no such function exists.
1909 This is purely an operation on function codes; it does not guarantee
1910 that the target actually has an implementation of the function. */
1912 static built_in_function
1913 mathfn_built_in_2 (tree type, combined_fn fn)
1915 tree mtype;
1916 built_in_function fcode, fcodef, fcodel;
1917 built_in_function fcodef16 = END_BUILTINS;
1918 built_in_function fcodef32 = END_BUILTINS;
1919 built_in_function fcodef64 = END_BUILTINS;
1920 built_in_function fcodef128 = END_BUILTINS;
1921 built_in_function fcodef32x = END_BUILTINS;
1922 built_in_function fcodef64x = END_BUILTINS;
1923 built_in_function fcodef128x = END_BUILTINS;
1925 switch (fn)
1927 #define SEQ_OF_CASE_MATHFN \
1928 CASE_MATHFN (ACOS) \
1929 CASE_MATHFN (ACOSH) \
1930 CASE_MATHFN (ASIN) \
1931 CASE_MATHFN (ASINH) \
1932 CASE_MATHFN (ATAN) \
1933 CASE_MATHFN (ATAN2) \
1934 CASE_MATHFN (ATANH) \
1935 CASE_MATHFN (CBRT) \
1936 CASE_MATHFN_FLOATN (CEIL) \
1937 CASE_MATHFN (CEXPI) \
1938 CASE_MATHFN_FLOATN (COPYSIGN) \
1939 CASE_MATHFN (COS) \
1940 CASE_MATHFN (COSH) \
1941 CASE_MATHFN (DREM) \
1942 CASE_MATHFN (ERF) \
1943 CASE_MATHFN (ERFC) \
1944 CASE_MATHFN (EXP) \
1945 CASE_MATHFN (EXP10) \
1946 CASE_MATHFN (EXP2) \
1947 CASE_MATHFN (EXPM1) \
1948 CASE_MATHFN (FABS) \
1949 CASE_MATHFN (FDIM) \
1950 CASE_MATHFN_FLOATN (FLOOR) \
1951 CASE_MATHFN_FLOATN (FMA) \
1952 CASE_MATHFN_FLOATN (FMAX) \
1953 CASE_MATHFN_FLOATN (FMIN) \
1954 CASE_MATHFN (FMOD) \
1955 CASE_MATHFN (FREXP) \
1956 CASE_MATHFN (GAMMA) \
1957 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1958 CASE_MATHFN (HUGE_VAL) \
1959 CASE_MATHFN (HYPOT) \
1960 CASE_MATHFN (ILOGB) \
1961 CASE_MATHFN (ICEIL) \
1962 CASE_MATHFN (IFLOOR) \
1963 CASE_MATHFN (INF) \
1964 CASE_MATHFN (IRINT) \
1965 CASE_MATHFN (IROUND) \
1966 CASE_MATHFN (ISINF) \
1967 CASE_MATHFN (J0) \
1968 CASE_MATHFN (J1) \
1969 CASE_MATHFN (JN) \
1970 CASE_MATHFN (LCEIL) \
1971 CASE_MATHFN (LDEXP) \
1972 CASE_MATHFN (LFLOOR) \
1973 CASE_MATHFN (LGAMMA) \
1974 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1975 CASE_MATHFN (LLCEIL) \
1976 CASE_MATHFN (LLFLOOR) \
1977 CASE_MATHFN (LLRINT) \
1978 CASE_MATHFN (LLROUND) \
1979 CASE_MATHFN (LOG) \
1980 CASE_MATHFN (LOG10) \
1981 CASE_MATHFN (LOG1P) \
1982 CASE_MATHFN (LOG2) \
1983 CASE_MATHFN (LOGB) \
1984 CASE_MATHFN (LRINT) \
1985 CASE_MATHFN (LROUND) \
1986 CASE_MATHFN (MODF) \
1987 CASE_MATHFN (NAN) \
1988 CASE_MATHFN (NANS) \
1989 CASE_MATHFN_FLOATN (NEARBYINT) \
1990 CASE_MATHFN (NEXTAFTER) \
1991 CASE_MATHFN (NEXTTOWARD) \
1992 CASE_MATHFN (POW) \
1993 CASE_MATHFN (POWI) \
1994 CASE_MATHFN (POW10) \
1995 CASE_MATHFN (REMAINDER) \
1996 CASE_MATHFN (REMQUO) \
1997 CASE_MATHFN_FLOATN (RINT) \
1998 CASE_MATHFN_FLOATN (ROUND) \
1999 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2000 CASE_MATHFN (SCALB) \
2001 CASE_MATHFN (SCALBLN) \
2002 CASE_MATHFN (SCALBN) \
2003 CASE_MATHFN (SIGNBIT) \
2004 CASE_MATHFN (SIGNIFICAND) \
2005 CASE_MATHFN (SIN) \
2006 CASE_MATHFN (SINCOS) \
2007 CASE_MATHFN (SINH) \
2008 CASE_MATHFN_FLOATN (SQRT) \
2009 CASE_MATHFN (TAN) \
2010 CASE_MATHFN (TANH) \
2011 CASE_MATHFN (TGAMMA) \
2012 CASE_MATHFN_FLOATN (TRUNC) \
2013 CASE_MATHFN (Y0) \
2014 CASE_MATHFN (Y1) \
2015 CASE_MATHFN (YN)
2017 SEQ_OF_CASE_MATHFN
2019 default:
2020 return END_BUILTINS;
2023 mtype = TYPE_MAIN_VARIANT (type);
2024 if (mtype == double_type_node)
2025 return fcode;
2026 else if (mtype == float_type_node)
2027 return fcodef;
2028 else if (mtype == long_double_type_node)
2029 return fcodel;
2030 else if (mtype == float16_type_node)
2031 return fcodef16;
2032 else if (mtype == float32_type_node)
2033 return fcodef32;
2034 else if (mtype == float64_type_node)
2035 return fcodef64;
2036 else if (mtype == float128_type_node)
2037 return fcodef128;
2038 else if (mtype == float32x_type_node)
2039 return fcodef32x;
2040 else if (mtype == float64x_type_node)
2041 return fcodef64x;
2042 else if (mtype == float128x_type_node)
2043 return fcodef128x;
2044 else
2045 return END_BUILTINS;
2048 #undef CASE_MATHFN
2049 #undef CASE_MATHFN_FLOATN
2050 #undef CASE_MATHFN_REENT
2052 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2053 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2054 otherwise use the explicit declaration. If we can't do the conversion,
2055 return null. */
2057 static tree
2058 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2060 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2061 if (fcode2 == END_BUILTINS)
2062 return NULL_TREE;
2064 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2065 return NULL_TREE;
2067 return builtin_decl_explicit (fcode2);
2070 /* Like mathfn_built_in_1, but always use the implicit array. */
2072 tree
2073 mathfn_built_in (tree type, combined_fn fn)
2075 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2078 /* Like mathfn_built_in_1, but take a built_in_function and
2079 always use the implicit array. */
2081 tree
2082 mathfn_built_in (tree type, enum built_in_function fn)
2084 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
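/* Illustrative sketch of a typical use (the caller shown is an assumption,
   not a quote from elsewhere in the compiler):

       tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);
       // fn is the decl for BUILT_IN_SINF, or NULL_TREE if sinf is not
       // implicitly available, so a caller can rewrite
       // (float) sin ((double) x) into sinf (x) without spelling out
       // every F/L/_FloatN suffix by hand.
*/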
2087 /* Return the type associated with a built in function, i.e., the one
2088 to be passed to mathfn_built_in to get the type-specific
2089 function. */
2091 tree
2092 mathfn_built_in_type (combined_fn fn)
2094 #define CASE_MATHFN(MATHFN) \
2095 case CFN_BUILT_IN_##MATHFN: \
2096 return double_type_node; \
2097 case CFN_BUILT_IN_##MATHFN##F: \
2098 return float_type_node; \
2099 case CFN_BUILT_IN_##MATHFN##L: \
2100 return long_double_type_node;
2102 #define CASE_MATHFN_FLOATN(MATHFN) \
2103 CASE_MATHFN(MATHFN) \
2104 case CFN_BUILT_IN_##MATHFN##F16: \
2105 return float16_type_node; \
2106 case CFN_BUILT_IN_##MATHFN##F32: \
2107 return float32_type_node; \
2108 case CFN_BUILT_IN_##MATHFN##F64: \
2109 return float64_type_node; \
2110 case CFN_BUILT_IN_##MATHFN##F128: \
2111 return float128_type_node; \
2112 case CFN_BUILT_IN_##MATHFN##F32X: \
2113 return float32x_type_node; \
2114 case CFN_BUILT_IN_##MATHFN##F64X: \
2115 return float64x_type_node; \
2116 case CFN_BUILT_IN_##MATHFN##F128X: \
2117 return float128x_type_node;
2119 /* Similar to above, but appends _R after any F/L suffix. */
2120 #define CASE_MATHFN_REENT(MATHFN) \
2121 case CFN_BUILT_IN_##MATHFN##_R: \
2122 return double_type_node; \
2123 case CFN_BUILT_IN_##MATHFN##F_R: \
2124 return float_type_node; \
2125 case CFN_BUILT_IN_##MATHFN##L_R: \
2126 return long_double_type_node;
2128 switch (fn)
2130 SEQ_OF_CASE_MATHFN
2132 default:
2133 return NULL_TREE;
2136 #undef CASE_MATHFN
2137 #undef CASE_MATHFN_FLOATN
2138 #undef CASE_MATHFN_REENT
2139 #undef SEQ_OF_CASE_MATHFN
2142 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2143 return its code, otherwise return IFN_LAST. Note that this function
2144 only tests whether the function is defined in internals.def, not whether
2145 it is actually available on the target. */
2147 internal_fn
2148 associated_internal_fn (tree fndecl)
2150 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2151 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2152 switch (DECL_FUNCTION_CODE (fndecl))
2154 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2155 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2156 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2157 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2158 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2159 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #include "internal-fn.def"
2163 CASE_FLT_FN (BUILT_IN_POW10):
2164 return IFN_EXP10;
2166 CASE_FLT_FN (BUILT_IN_DREM):
2167 return IFN_REMAINDER;
2169 CASE_FLT_FN (BUILT_IN_SCALBN):
2170 CASE_FLT_FN (BUILT_IN_SCALBLN):
2171 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2172 return IFN_LDEXP;
2173 return IFN_LAST;
2175 default:
2176 return IFN_LAST;
2180 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2181 on the current target by a call to an internal function, return the
2182 code of that internal function, otherwise return IFN_LAST. The caller
2183 is responsible for ensuring that any side-effects of the built-in
2184 call are dealt with correctly. E.g. if CALL sets errno, the caller
2185 must decide that the errno result isn't needed or make it available
2186 in some other way. */
2188 internal_fn
2189 replacement_internal_fn (gcall *call)
2191 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2193 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2194 if (ifn != IFN_LAST)
2196 tree_pair types = direct_internal_fn_types (ifn, call);
2197 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2198 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2199 return ifn;
2202 return IFN_LAST;
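/* Illustrative sketch of how the mapping above is meant to be used; the
   surrounding statement handling is an assumption for the example:

       // CALL is a gcall, e.g. a call to __builtin_sqrt.
       internal_fn ifn = replacement_internal_fn (call);
       if (ifn != IFN_LAST)
         {
           // IFN_SQRT is supported directly for this type on the current
           // target, so the library call can be replaced, provided the
           // caller has dealt with any errno side effect.
         }
*/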
2205 /* Expand a call to the builtin trinary math functions (fma).
2206 Return NULL_RTX if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's
2210 operands. */
2212 static rtx
2213 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2215 optab builtin_optab;
2216 rtx op0, op1, op2, result;
2217 rtx_insn *insns;
2218 tree fndecl = get_callee_fndecl (exp);
2219 tree arg0, arg1, arg2;
2220 machine_mode mode;
2222 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2223 return NULL_RTX;
2225 arg0 = CALL_EXPR_ARG (exp, 0);
2226 arg1 = CALL_EXPR_ARG (exp, 1);
2227 arg2 = CALL_EXPR_ARG (exp, 2);
2229 switch (DECL_FUNCTION_CODE (fndecl))
2231 CASE_FLT_FN (BUILT_IN_FMA):
2232 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2233 builtin_optab = fma_optab; break;
2234 default:
2235 gcc_unreachable ();
2238 /* Make a suitable register to place result in. */
2239 mode = TYPE_MODE (TREE_TYPE (exp));
2241 /* Before working hard, check whether the instruction is available. */
2242 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2243 return NULL_RTX;
2245 result = gen_reg_rtx (mode);
2247 /* Always stabilize the argument list. */
2248 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2249 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2250 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2252 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2253 op1 = expand_normal (arg1);
2254 op2 = expand_normal (arg2);
2256 start_sequence ();
2258 /* Compute into RESULT.
2259 Set RESULT to wherever the result comes back. */
2260 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2261 result, 0);
2263 /* If we were unable to expand via the builtin, stop the sequence
2264 (without outputting the insns) and call to the library function
2265 with the stabilized argument list. */
2266 if (result == 0)
2268 end_sequence ();
2269 return expand_call (exp, target, target == const0_rtx);
2272 /* Output the entire sequence. */
2273 insns = get_insns ();
2274 end_sequence ();
2275 emit_insn (insns);
2277 return result;
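/* Illustrative sketch: on a target whose fma_optab has a DFmode handler,
   source such as

       double r = __builtin_fma (a, b, c);   // a * b + c, single rounding

   is expanded by the code above into one fused multiply-add insn instead
   of a call to the libm fma routine.  */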
2280 /* Expand a call to the builtin sin and cos math functions.
2281 Return NULL_RTX if a normal call should be emitted rather than expanding the
2282 function in-line. EXP is the expression that is a call to the builtin
2283 function; if convenient, the result should be placed in TARGET.
2284 SUBTARGET may be used as the target for computing one of EXP's
2285 operands. */
2287 static rtx
2288 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2290 optab builtin_optab;
2291 rtx op0;
2292 rtx_insn *insns;
2293 tree fndecl = get_callee_fndecl (exp);
2294 machine_mode mode;
2295 tree arg;
2297 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2298 return NULL_RTX;
2300 arg = CALL_EXPR_ARG (exp, 0);
2302 switch (DECL_FUNCTION_CODE (fndecl))
2304 CASE_FLT_FN (BUILT_IN_SIN):
2305 CASE_FLT_FN (BUILT_IN_COS):
2306 builtin_optab = sincos_optab; break;
2307 default:
2308 gcc_unreachable ();
2311 /* Make a suitable register to place result in. */
2312 mode = TYPE_MODE (TREE_TYPE (exp));
2314 /* Check if the sincos insn is available, otherwise fall back
2315 to sin or cos insn. */
2316 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2317 switch (DECL_FUNCTION_CODE (fndecl))
2319 CASE_FLT_FN (BUILT_IN_SIN):
2320 builtin_optab = sin_optab; break;
2321 CASE_FLT_FN (BUILT_IN_COS):
2322 builtin_optab = cos_optab; break;
2323 default:
2324 gcc_unreachable ();
2327 /* Before working hard, check whether the instruction is available. */
2328 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2330 rtx result = gen_reg_rtx (mode);
2332 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2333 need to expand the argument again. This way, we will not perform
2334 side-effects more than once. */
2335 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2337 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2339 start_sequence ();
2341 /* Compute into RESULT.
2342 Set RESULT to wherever the result comes back. */
2343 if (builtin_optab == sincos_optab)
2345 int ok;
2347 switch (DECL_FUNCTION_CODE (fndecl))
2349 CASE_FLT_FN (BUILT_IN_SIN):
2350 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2351 break;
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2354 break;
2355 default:
2356 gcc_unreachable ();
2358 gcc_assert (ok);
2360 else
2361 result = expand_unop (mode, builtin_optab, op0, result, 0);
2363 if (result != 0)
2365 /* Output the entire sequence. */
2366 insns = get_insns ();
2367 end_sequence ();
2368 emit_insn (insns);
2369 return result;
2372 /* If we were unable to expand via the builtin, stop the sequence
2373 (without outputting the insns) and call to the library function
2374 with the stabilized argument list. */
2375 end_sequence ();
2378 return expand_call (exp, target, target == const0_rtx);
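/* Illustrative sketch: for source like

       double s = __builtin_sin (x);

   the code above first tries the two-output sincos pattern (discarding
   the unused cosine result), then a plain sin or cos insn, and finally
   falls back to the ordinary libm call.  */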
2381 /* Given an interclass math builtin decl FNDECL and its argument ARG
2382 return an RTL instruction code that implements the functionality.
2383 If that isn't possible or available return CODE_FOR_nothing. */
2385 static enum insn_code
2386 interclass_mathfn_icode (tree arg, tree fndecl)
2388 bool errno_set = false;
2389 optab builtin_optab = unknown_optab;
2390 machine_mode mode;
2392 switch (DECL_FUNCTION_CODE (fndecl))
2394 CASE_FLT_FN (BUILT_IN_ILOGB):
2395 errno_set = true; builtin_optab = ilogb_optab; break;
2396 CASE_FLT_FN (BUILT_IN_ISINF):
2397 builtin_optab = isinf_optab; break;
2398 case BUILT_IN_ISNORMAL:
2399 case BUILT_IN_ISFINITE:
2400 CASE_FLT_FN (BUILT_IN_FINITE):
2401 case BUILT_IN_FINITED32:
2402 case BUILT_IN_FINITED64:
2403 case BUILT_IN_FINITED128:
2404 case BUILT_IN_ISINFD32:
2405 case BUILT_IN_ISINFD64:
2406 case BUILT_IN_ISINFD128:
2407 /* These builtins have no optabs (yet). */
2408 break;
2409 default:
2410 gcc_unreachable ();
2413 /* There's no easy way to detect the case we need to set EDOM. */
2414 if (flag_errno_math && errno_set)
2415 return CODE_FOR_nothing;
2417 /* Optab mode depends on the mode of the input argument. */
2418 mode = TYPE_MODE (TREE_TYPE (arg));
2420 if (builtin_optab)
2421 return optab_handler (builtin_optab, mode);
2422 return CODE_FOR_nothing;
2425 /* Expand a call to one of the builtin math functions that operate on
2426 a floating-point argument and output an integer result (ilogb, isinf,
2427 isnan, etc.).
2428 Return 0 if a normal call should be emitted rather than expanding the
2429 function in-line. EXP is the expression that is a call to the builtin
2430 function; if convenient, the result should be placed in TARGET. */
2432 static rtx
2433 expand_builtin_interclass_mathfn (tree exp, rtx target)
2435 enum insn_code icode = CODE_FOR_nothing;
2436 rtx op0;
2437 tree fndecl = get_callee_fndecl (exp);
2438 machine_mode mode;
2439 tree arg;
2441 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2442 return NULL_RTX;
2444 arg = CALL_EXPR_ARG (exp, 0);
2445 icode = interclass_mathfn_icode (arg, fndecl);
2446 mode = TYPE_MODE (TREE_TYPE (arg));
2448 if (icode != CODE_FOR_nothing)
2450 class expand_operand ops[1];
2451 rtx_insn *last = get_last_insn ();
2452 tree orig_arg = arg;
2454 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2455 need to expand the argument again. This way, we will not perform
2456 side-effects more than once. */
2457 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2459 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2461 if (mode != GET_MODE (op0))
2462 op0 = convert_to_mode (mode, op0, 0);
2464 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2465 if (maybe_legitimize_operands (icode, 0, 1, ops)
2466 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2467 return ops[0].value;
2469 delete_insns_since (last);
2470 CALL_EXPR_ARG (exp, 0) = orig_arg;
2473 return NULL_RTX;
2476 /* Expand a call to the builtin sincos math function.
2477 Return NULL_RTX if a normal call should be emitted rather than expanding the
2478 function in-line. EXP is the expression that is a call to the builtin
2479 function. */
2481 static rtx
2482 expand_builtin_sincos (tree exp)
2484 rtx op0, op1, op2, target1, target2;
2485 machine_mode mode;
2486 tree arg, sinp, cosp;
2487 int result;
2488 location_t loc = EXPR_LOCATION (exp);
2489 tree alias_type, alias_off;
2491 if (!validate_arglist (exp, REAL_TYPE,
2492 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2493 return NULL_RTX;
2495 arg = CALL_EXPR_ARG (exp, 0);
2496 sinp = CALL_EXPR_ARG (exp, 1);
2497 cosp = CALL_EXPR_ARG (exp, 2);
2499 /* Make a suitable register to place result in. */
2500 mode = TYPE_MODE (TREE_TYPE (arg));
2502 /* Check if sincos insn is available, otherwise emit the call. */
2503 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2504 return NULL_RTX;
2506 target1 = gen_reg_rtx (mode);
2507 target2 = gen_reg_rtx (mode);
2509 op0 = expand_normal (arg);
2510 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2511 alias_off = build_int_cst (alias_type, 0);
2512 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2513 sinp, alias_off));
2514 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2515 cosp, alias_off));
2517 /* Compute into target1 and target2.
2518 Set TARGET to wherever the result comes back. */
2519 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2520 gcc_assert (result);
2522 /* Move target1 and target2 to the memory locations indicated
2523 by op1 and op2. */
2524 emit_move_insn (op1, target1);
2525 emit_move_insn (op2, target2);
2527 return const0_rtx;
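/* Illustrative sketch: the source-level form handled here is

       double s, c;
       sincos (x, &s, &c);          // or __builtin_sincos

   which, when sincos_optab has a handler for the mode, becomes a single
   two-output insn followed by stores through the two pointers.  */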
2530 /* Expand a call to the internal cexpi builtin to the sincos math function.
2531 EXP is the expression that is a call to the builtin function; if convenient,
2532 the result should be placed in TARGET. */
2534 static rtx
2535 expand_builtin_cexpi (tree exp, rtx target)
2537 tree fndecl = get_callee_fndecl (exp);
2538 tree arg, type;
2539 machine_mode mode;
2540 rtx op0, op1, op2;
2541 location_t loc = EXPR_LOCATION (exp);
2543 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2544 return NULL_RTX;
2546 arg = CALL_EXPR_ARG (exp, 0);
2547 type = TREE_TYPE (arg);
2548 mode = TYPE_MODE (TREE_TYPE (arg));
2550 /* Try expanding via a sincos optab, fall back to emitting a libcall
2551 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2552 is only generated from sincos or cexp, or if we have either of them.
2553 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2555 op1 = gen_reg_rtx (mode);
2556 op2 = gen_reg_rtx (mode);
2558 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2560 /* Compute into op1 and op2. */
2561 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2563 else if (targetm.libc_has_function (function_sincos, type))
2565 tree call, fn = NULL_TREE;
2566 tree top1, top2;
2567 rtx op1a, op2a;
2569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2570 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2571 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2572 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2573 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2574 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2575 else
2576 gcc_unreachable ();
2578 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2579 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2580 op1a = copy_addr_to_reg (XEXP (op1, 0));
2581 op2a = copy_addr_to_reg (XEXP (op2, 0));
2582 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2583 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2585 /* Make sure not to fold the sincos call again. */
2586 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2587 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2588 call, 3, arg, top1, top2));
2590 else
2592 tree call, fn = NULL_TREE, narg;
2593 tree ctype = build_complex_type (type);
2595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2596 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2597 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2598 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2599 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2600 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2601 else
2602 gcc_unreachable ();
2604 /* If we don't have a decl for cexp create one. This is the
2605 friendliest fallback if the user calls __builtin_cexpi
2606 without full target C99 function support. */
2607 if (fn == NULL_TREE)
2609 tree fntype;
2610 const char *name = NULL;
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 name = "cexpf";
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 name = "cexp";
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 name = "cexpl";
2619 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2620 fn = build_fn_decl (name, fntype);
2623 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2624 build_real (type, dconst0), arg);
2626 /* Make sure not to fold the cexp call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2628 return expand_expr (build_call_nary (ctype, call, 1, narg),
2629 target, VOIDmode, EXPAND_NORMAL);
2632 /* Now build the proper return type. */
2633 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2634 make_tree (TREE_TYPE (arg), op2),
2635 make_tree (TREE_TYPE (arg), op1)),
2636 target, VOIDmode, EXPAND_NORMAL);
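/* Illustrative sketch: __builtin_cexpi (x) stands for cos (x) + i*sin (x),
   i.e. cexp (I*x); the expansion above uses, in order of preference,

       1. a two-output sincos insn,
       2. a libc call sincos (x, &s, &c), or
       3. a libc call cexp (0.0 + I*x),

   and then assembles the complex result from the real and imaginary
   parts.  */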
2639 /* Conveniently construct a function call expression. FNDECL names the
2640 function to be called, N is the number of arguments, and the "..."
2641 parameters are the argument expressions. Unlike build_call_expr
2642 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2644 static tree
2645 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2647 va_list ap;
2648 tree fntype = TREE_TYPE (fndecl);
2649 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2651 va_start (ap, n);
2652 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2653 va_end (ap);
2654 SET_EXPR_LOCATION (fn, loc);
2655 return fn;
2658 /* Expand a call to one of the builtin rounding functions gcc defines
2659 as an extension (lfloor and lceil). As these are gcc extensions we
2660 do not need to worry about setting errno to EDOM.
2661 If expanding via optab fails, lower expression to (int)(floor(x)).
2662 EXP is the expression that is a call to the builtin function;
2663 if convenient, the result should be placed in TARGET. */
2665 static rtx
2666 expand_builtin_int_roundingfn (tree exp, rtx target)
2668 convert_optab builtin_optab;
2669 rtx op0, tmp;
2670 rtx_insn *insns;
2671 tree fndecl = get_callee_fndecl (exp);
2672 enum built_in_function fallback_fn;
2673 tree fallback_fndecl;
2674 machine_mode mode;
2675 tree arg;
2677 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2678 return NULL_RTX;
2680 arg = CALL_EXPR_ARG (exp, 0);
2682 switch (DECL_FUNCTION_CODE (fndecl))
2684 CASE_FLT_FN (BUILT_IN_ICEIL):
2685 CASE_FLT_FN (BUILT_IN_LCEIL):
2686 CASE_FLT_FN (BUILT_IN_LLCEIL):
2687 builtin_optab = lceil_optab;
2688 fallback_fn = BUILT_IN_CEIL;
2689 break;
2691 CASE_FLT_FN (BUILT_IN_IFLOOR):
2692 CASE_FLT_FN (BUILT_IN_LFLOOR):
2693 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2694 builtin_optab = lfloor_optab;
2695 fallback_fn = BUILT_IN_FLOOR;
2696 break;
2698 default:
2699 gcc_unreachable ();
2702 /* Make a suitable register to place result in. */
2703 mode = TYPE_MODE (TREE_TYPE (exp));
2705 target = gen_reg_rtx (mode);
2707 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2708 need to expand the argument again. This way, we will not perform
2709 side-effects more than once. */
2710 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2712 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2714 start_sequence ();
2716 /* Compute into TARGET. */
2717 if (expand_sfix_optab (target, op0, builtin_optab))
2719 /* Output the entire sequence. */
2720 insns = get_insns ();
2721 end_sequence ();
2722 emit_insn (insns);
2723 return target;
2726 /* If we were unable to expand via the builtin, stop the sequence
2727 (without outputting the insns). */
2728 end_sequence ();
2730 /* Fall back to floating point rounding optab. */
2731 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2733 /* For non-C99 targets we may end up without a fallback fndecl here
2734 if the user called __builtin_lfloor directly. In this case emit
2735 a call to the floor/ceil variants nevertheless. This should result
2736 in the best user experience on targets without full C99 support. */
2737 if (fallback_fndecl == NULL_TREE)
2739 tree fntype;
2740 const char *name = NULL;
2742 switch (DECL_FUNCTION_CODE (fndecl))
2744 case BUILT_IN_ICEIL:
2745 case BUILT_IN_LCEIL:
2746 case BUILT_IN_LLCEIL:
2747 name = "ceil";
2748 break;
2749 case BUILT_IN_ICEILF:
2750 case BUILT_IN_LCEILF:
2751 case BUILT_IN_LLCEILF:
2752 name = "ceilf";
2753 break;
2754 case BUILT_IN_ICEILL:
2755 case BUILT_IN_LCEILL:
2756 case BUILT_IN_LLCEILL:
2757 name = "ceill";
2758 break;
2759 case BUILT_IN_IFLOOR:
2760 case BUILT_IN_LFLOOR:
2761 case BUILT_IN_LLFLOOR:
2762 name = "floor";
2763 break;
2764 case BUILT_IN_IFLOORF:
2765 case BUILT_IN_LFLOORF:
2766 case BUILT_IN_LLFLOORF:
2767 name = "floorf";
2768 break;
2769 case BUILT_IN_IFLOORL:
2770 case BUILT_IN_LFLOORL:
2771 case BUILT_IN_LLFLOORL:
2772 name = "floorl";
2773 break;
2774 default:
2775 gcc_unreachable ();
2778 fntype = build_function_type_list (TREE_TYPE (arg),
2779 TREE_TYPE (arg), NULL_TREE);
2780 fallback_fndecl = build_fn_decl (name, fntype);
2783 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2785 tmp = expand_normal (exp);
2786 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2788 /* Truncate the result of floating point optab to integer
2789 via expand_fix (). */
2790 target = gen_reg_rtx (mode);
2791 expand_fix (target, tmp, 0);
2793 return target;
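/* Illustrative sketch: the extensions handled here correspond to source
   like

       long l = __builtin_lfloor (x);   // same value as (long) floor (x)

   When lfloor_optab (or lceil_optab) has no handler, the code above
   instead emits the floor or ceil call and truncates its result with
   expand_fix, as described in the function comment.  */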
2796 /* Expand a call to one of the builtin math functions doing integer
2797 conversion (lrint).
2798 Return 0 if a normal call should be emitted rather than expanding the
2799 function in-line. EXP is the expression that is a call to the builtin
2800 function; if convenient, the result should be placed in TARGET. */
2802 static rtx
2803 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2805 convert_optab builtin_optab;
2806 rtx op0;
2807 rtx_insn *insns;
2808 tree fndecl = get_callee_fndecl (exp);
2809 tree arg;
2810 machine_mode mode;
2811 enum built_in_function fallback_fn = BUILT_IN_NONE;
2813 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2814 return NULL_RTX;
2816 arg = CALL_EXPR_ARG (exp, 0);
2818 switch (DECL_FUNCTION_CODE (fndecl))
2820 CASE_FLT_FN (BUILT_IN_IRINT):
2821 fallback_fn = BUILT_IN_LRINT;
2822 gcc_fallthrough ();
2823 CASE_FLT_FN (BUILT_IN_LRINT):
2824 CASE_FLT_FN (BUILT_IN_LLRINT):
2825 builtin_optab = lrint_optab;
2826 break;
2828 CASE_FLT_FN (BUILT_IN_IROUND):
2829 fallback_fn = BUILT_IN_LROUND;
2830 gcc_fallthrough ();
2831 CASE_FLT_FN (BUILT_IN_LROUND):
2832 CASE_FLT_FN (BUILT_IN_LLROUND):
2833 builtin_optab = lround_optab;
2834 break;
2836 default:
2837 gcc_unreachable ();
2840 /* There's no easy way to detect the case we need to set EDOM. */
2841 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2842 return NULL_RTX;
2844 /* Make a suitable register to place result in. */
2845 mode = TYPE_MODE (TREE_TYPE (exp));
2847 /* There's no easy way to detect the case we need to set EDOM. */
2848 if (!flag_errno_math)
2850 rtx result = gen_reg_rtx (mode);
2852 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2853 need to expand the argument again. This way, we will not perform
2854 side-effects more than once. */
2855 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2857 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2859 start_sequence ();
2861 if (expand_sfix_optab (result, op0, builtin_optab))
2863 /* Output the entire sequence. */
2864 insns = get_insns ();
2865 end_sequence ();
2866 emit_insn (insns);
2867 return result;
2870 /* If we were unable to expand via the builtin, stop the sequence
2871 (without outputting the insns) and call to the library function
2872 with the stabilized argument list. */
2873 end_sequence ();
2876 if (fallback_fn != BUILT_IN_NONE)
2878 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2879 targets, (int) round (x) should never be transformed into
2880 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2881 a call to lround in the hope that the target provides at least some
2882 C99 functions. This should result in the best user experience on
2883 targets without full C99 support. */
2884 tree fallback_fndecl = mathfn_built_in_1
2885 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2887 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2888 fallback_fndecl, 1, arg);
2890 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2891 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2892 return convert_to_mode (mode, target, 0);
2895 return expand_call (exp, target, target == const0_rtx);
2898 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2899 a normal call should be emitted rather than expanding the function
2900 in-line. EXP is the expression that is a call to the builtin
2901 function; if convenient, the result should be placed in TARGET. */
2903 static rtx
2904 expand_builtin_powi (tree exp, rtx target)
2906 tree arg0, arg1;
2907 rtx op0, op1;
2908 machine_mode mode;
2909 machine_mode mode2;
2911 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2912 return NULL_RTX;
2914 arg0 = CALL_EXPR_ARG (exp, 0);
2915 arg1 = CALL_EXPR_ARG (exp, 1);
2916 mode = TYPE_MODE (TREE_TYPE (exp));
2918 /* Emit a libcall to libgcc. */
2920 /* Mode of the 2nd argument must match that of an int. */
2921 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2923 if (target == NULL_RTX)
2924 target = gen_reg_rtx (mode);
2926 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2927 if (GET_MODE (op0) != mode)
2928 op0 = convert_to_mode (mode, op0, 0);
2929 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2930 if (GET_MODE (op1) != mode2)
2931 op1 = convert_to_mode (mode2, op1, 0);
2933 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2934 target, LCT_CONST, mode,
2935 op0, mode, op1, mode2);
2937 return target;
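/* Illustrative sketch: the libcall emitted above implements

       double r = __builtin_powi (x, n);   // x raised to the integer power n

   via the powi routine from libgcc for the relevant mode (conventionally
   named __powidf2 for double; the name is mentioned only for
   orientation).  */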
2940 /* Expand expression EXP which is a call to the strlen builtin. Return
2941 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2942 try to get the result in TARGET, if convenient. */
2944 static rtx
2945 expand_builtin_strlen (tree exp, rtx target,
2946 machine_mode target_mode)
2948 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2949 return NULL_RTX;
2951 tree src = CALL_EXPR_ARG (exp, 0);
2953 /* If the length can be computed at compile-time, return it. */
2954 if (tree len = c_strlen (src, 0))
2955 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2957 /* If the length can be computed at compile-time and is constant
2958 integer, but there are side-effects in src, evaluate
2959 src for side-effects, then return len.
2960 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2961 can be optimized into: i++; x = 3; */
2962 tree len = c_strlen (src, 1);
2963 if (len && TREE_CODE (len) == INTEGER_CST)
2965 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2966 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2969 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
2971 /* If SRC is not a pointer type, don't do this operation inline. */
2972 if (align == 0)
2973 return NULL_RTX;
2975 /* Bail out if we can't compute strlen in the right mode. */
2976 machine_mode insn_mode;
2977 enum insn_code icode = CODE_FOR_nothing;
2978 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2980 icode = optab_handler (strlen_optab, insn_mode);
2981 if (icode != CODE_FOR_nothing)
2982 break;
2984 if (insn_mode == VOIDmode)
2985 return NULL_RTX;
2987 /* Make a place to hold the source address. We will not expand
2988 the actual source until we are sure that the expansion will
2989 not fail -- there are trees that cannot be expanded twice. */
2990 rtx src_reg = gen_reg_rtx (Pmode);
2992 /* Mark the beginning of the strlen sequence so we can emit the
2993 source operand later. */
2994 rtx_insn *before_strlen = get_last_insn ();
2996 class expand_operand ops[4];
2997 create_output_operand (&ops[0], target, insn_mode);
2998 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2999 create_integer_operand (&ops[2], 0);
3000 create_integer_operand (&ops[3], align);
3001 if (!maybe_expand_insn (icode, 4, ops))
3002 return NULL_RTX;
3004 /* Check to see if the argument was declared attribute nonstring
3005 and if so, issue a warning since at this point it's not known
3006 to be nul-terminated. */
3007 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3009 /* Now that we are assured of success, expand the source. */
3010 start_sequence ();
3011 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3012 if (pat != src_reg)
3014 #ifdef POINTERS_EXTEND_UNSIGNED
3015 if (GET_MODE (pat) != Pmode)
3016 pat = convert_to_mode (Pmode, pat,
3017 POINTERS_EXTEND_UNSIGNED);
3018 #endif
3019 emit_move_insn (src_reg, pat);
3021 pat = get_insns ();
3022 end_sequence ();
3024 if (before_strlen)
3025 emit_insn_after (pat, before_strlen);
3026 else
3027 emit_insn_before (pat, get_insns ());
3029 /* Return the value in the proper mode for this function. */
3030 if (GET_MODE (ops[0].value) == target_mode)
3031 target = ops[0].value;
3032 else if (target != 0)
3033 convert_move (target, ops[0].value, 0);
3034 else
3035 target = convert_to_mode (target_mode, ops[0].value, 0);
3037 return target;
3040 /* Expand call EXP to the strnlen built-in, returning the result
3041 in TARGET if convenient, or NULL_RTX on failure. */
3043 static rtx
3044 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3046 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3047 return NULL_RTX;
3049 tree src = CALL_EXPR_ARG (exp, 0);
3050 tree bound = CALL_EXPR_ARG (exp, 1);
3052 if (!bound)
3053 return NULL_RTX;
3055 location_t loc = UNKNOWN_LOCATION;
3056 if (EXPR_HAS_LOCATION (exp))
3057 loc = EXPR_LOCATION (exp);
3059 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3060 so these conversions aren't necessary. */
3061 c_strlen_data lendata = { };
3062 tree len = c_strlen (src, 0, &lendata, 1);
3063 if (len)
3064 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3066 if (TREE_CODE (bound) == INTEGER_CST)
3068 if (!len)
3069 return NULL_RTX;
3071 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3072 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3075 if (TREE_CODE (bound) != SSA_NAME)
3076 return NULL_RTX;
3078 wide_int min, max;
3079 value_range r;
3080 get_global_range_query ()->range_of_expr (r, bound);
3081 if (r.kind () != VR_RANGE)
3082 return NULL_RTX;
3083 min = r.lower_bound ();
3084 max = r.upper_bound ();
3086 if (!len || TREE_CODE (len) != INTEGER_CST)
3088 bool exact;
3089 lendata.decl = unterminated_array (src, &len, &exact);
3090 if (!lendata.decl)
3091 return NULL_RTX;
3094 if (lendata.decl)
3095 return NULL_RTX;
3097 if (wi::gtu_p (min, wi::to_wide (len)))
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3100 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3101 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
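/* Illustrative sketch: the constant-bound path above lets e.g.

       size_t n = __builtin_strnlen ("abc", 8);

   be expanded as the constant MIN (3, 8) = 3; with an SSA_NAME bound the
   value range of the bound decides whether the known length alone or a
   MIN of length and bound is used.  */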
3104 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3105 bytes from DATA + OFFSET and return them reinterpreted as
3106 a target constant. */
3108 static rtx
3109 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3110 fixed_size_mode mode)
3112 /* The REPresentation pointed to by DATA need not be a nul-terminated
3113 string but the caller guarantees it's large enough for MODE. */
3114 const char *rep = (const char *) data;
3116 /* The by-pieces infrastructure does not try to pick a vector mode
3117 for memcpy expansion. */
3118 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3119 /*nul_terminated=*/false);
3122 /* LEN specifies the length of the block of the memcpy/memset operation.
3123 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3124 In some cases we can make a very likely guess about the maximum size,
3125 which we then store in PROBABLE_MAX_SIZE. */
3127 static void
3128 determine_block_size (tree len, rtx len_rtx,
3129 unsigned HOST_WIDE_INT *min_size,
3130 unsigned HOST_WIDE_INT *max_size,
3131 unsigned HOST_WIDE_INT *probable_max_size)
3133 if (CONST_INT_P (len_rtx))
3135 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3136 return;
3138 else
3140 wide_int min, max;
3141 enum value_range_kind range_type = VR_UNDEFINED;
3143 /* Determine bounds from the type. */
3144 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3145 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3146 else
3147 *min_size = 0;
3148 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3149 *probable_max_size = *max_size
3150 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3151 else
3152 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3154 if (TREE_CODE (len) == SSA_NAME)
3156 value_range r;
3157 get_global_range_query ()->range_of_expr (r, len);
3158 range_type = r.kind ();
3159 if (range_type != VR_UNDEFINED)
3161 min = wi::to_wide (r.min ());
3162 max = wi::to_wide (r.max ());
3165 if (range_type == VR_RANGE)
3167 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3168 *min_size = min.to_uhwi ();
3169 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3170 *probable_max_size = *max_size = max.to_uhwi ();
3172 else if (range_type == VR_ANTI_RANGE)
3174 /* Code like
3176 int n;
3177 if (n < 100)
3178 memcpy (a, b, n)
3180 produces an anti-range allowing negative values of N. We can
3181 still use the information and guess that N is not negative. */
3183 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3184 *probable_max_size = min.to_uhwi () - 1;
3187 gcc_checking_assert (*max_size <=
3188 (unsigned HOST_WIDE_INT)
3189 GET_MODE_MASK (GET_MODE (len_rtx)));
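/* Illustrative sketch (assuming the range information survives until
   expansion): for code such as

       void f (unsigned int n, char *a, const char *b)
       {
         if (n <= 100)
           __builtin_memcpy (a, b, n);
       }

   the SSA range of N yields MIN_SIZE = 0 and MAX_SIZE = 100, letting the
   block-move expander choose a short inline sequence instead of an
   unconditional libcall.  */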
3192 /* Expand a call EXP to the memcpy builtin.
3193 Return NULL_RTX if we failed; the caller should emit a normal call,
3194 otherwise try to get the result in TARGET, if convenient (and in
3195 mode MODE if that's convenient). */
3197 static rtx
3198 expand_builtin_memcpy (tree exp, rtx target)
3200 if (!validate_arglist (exp,
3201 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3202 return NULL_RTX;
3204 tree dest = CALL_EXPR_ARG (exp, 0);
3205 tree src = CALL_EXPR_ARG (exp, 1);
3206 tree len = CALL_EXPR_ARG (exp, 2);
3208 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3209 /*retmode=*/ RETURN_BEGIN, false);
3212 /* Check a call EXP to the memmove built-in for validity.
3213 Return NULL_RTX on both success and failure. */
3215 static rtx
3216 expand_builtin_memmove (tree exp, rtx target)
3218 if (!validate_arglist (exp,
3219 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 return NULL_RTX;
3222 tree dest = CALL_EXPR_ARG (exp, 0);
3223 tree src = CALL_EXPR_ARG (exp, 1);
3224 tree len = CALL_EXPR_ARG (exp, 2);
3226 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3227 /*retmode=*/ RETURN_BEGIN, true);
3230 /* Expand a call EXP to the mempcpy builtin.
3231 Return NULL_RTX if we failed; the caller should emit a normal call,
3232 otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3235 static rtx
3236 expand_builtin_mempcpy (tree exp, rtx target)
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 return NULL_RTX;
3242 tree dest = CALL_EXPR_ARG (exp, 0);
3243 tree src = CALL_EXPR_ARG (exp, 1);
3244 tree len = CALL_EXPR_ARG (exp, 2);
3246 /* Policy does not generally allow using compute_objsize (which
3247 is used internally by check_memop_size) to change code generation
3248 or drive optimization decisions.
3250 In this instance it is safe because the code we generate has
3251 the same semantics regardless of the return value of
3252 check_memop_sizes. Exactly the same amount of data is copied
3253 and the return value is exactly the same in both cases.
3255 Furthermore, check_memop_size always uses mode 0 for the call to
3256 compute_objsize, so the imprecise nature of compute_objsize is
3257 avoided. */
3259 /* Avoid expanding mempcpy into memcpy when the call is determined
3260 to overflow the buffer. This also prevents the same overflow
3261 from being diagnosed again when expanding memcpy. */
3263 return expand_builtin_mempcpy_args (dest, src, len,
3264 target, exp, /*retmode=*/ RETURN_END);
3267 /* Helper function to do the actual work for expanding the memory copy
3268 family of functions (memcpy, mempcpy, stpcpy). The expansion copies LEN
3269 bytes of memory from SRC to DEST and assigns the result to TARGET if
3270 convenient. The return value is based on the RETMODE argument. */
3272 static rtx
3273 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3274 rtx target, tree exp, memop_ret retmode,
3275 bool might_overlap)
3277 unsigned int src_align = get_pointer_alignment (src);
3278 unsigned int dest_align = get_pointer_alignment (dest);
3279 rtx dest_mem, src_mem, dest_addr, len_rtx;
3280 HOST_WIDE_INT expected_size = -1;
3281 unsigned int expected_align = 0;
3282 unsigned HOST_WIDE_INT min_size;
3283 unsigned HOST_WIDE_INT max_size;
3284 unsigned HOST_WIDE_INT probable_max_size;
3286 bool is_move_done;
3288 /* If DEST is not a pointer type, call the normal function. */
3289 if (dest_align == 0)
3290 return NULL_RTX;
3292 /* If either SRC is not a pointer type, don't do this
3293 operation in-line. */
3294 if (src_align == 0)
3295 return NULL_RTX;
3297 if (currently_expanding_gimple_stmt)
3298 stringop_block_profile (currently_expanding_gimple_stmt,
3299 &expected_align, &expected_size);
3301 if (expected_align < dest_align)
3302 expected_align = dest_align;
3303 dest_mem = get_memory_rtx (dest, len);
3304 set_mem_align (dest_mem, dest_align);
3305 len_rtx = expand_normal (len);
3306 determine_block_size (len, len_rtx, &min_size, &max_size,
3307 &probable_max_size);
3309 /* Try to get the byte representation of the constant SRC points to,
3310 with its byte size in NBYTES. */
3311 unsigned HOST_WIDE_INT nbytes;
3312 const char *rep = getbyterep (src, &nbytes);
3314 /* If the function's constant bound LEN_RTX is less than or equal
3315 to the byte size of the representation of the constant argument,
3316 and if block move would be done by pieces, we can avoid loading
3317 the bytes from memory and only store the computed constant.
3318 This works in the overlap (memmove) case as well because
3319 store_by_pieces just generates a series of stores of constants
3320 from the representation returned by getbyterep(). */
3321 if (rep
3322 && CONST_INT_P (len_rtx)
3323 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3324 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3325 CONST_CAST (char *, rep),
3326 dest_align, false))
3328 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3329 builtin_memcpy_read_str,
3330 CONST_CAST (char *, rep),
3331 dest_align, false, retmode);
3332 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3333 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3334 return dest_mem;
3337 src_mem = get_memory_rtx (src, len);
3338 set_mem_align (src_mem, src_align);
3340 /* Copy word part most expediently. */
3341 enum block_op_methods method = BLOCK_OP_NORMAL;
3342 if (CALL_EXPR_TAILCALL (exp)
3343 && (retmode == RETURN_BEGIN || target == const0_rtx))
3344 method = BLOCK_OP_TAILCALL;
3345 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3346 && retmode == RETURN_END
3347 && !might_overlap
3348 && target != const0_rtx);
3349 if (use_mempcpy_call)
3350 method = BLOCK_OP_NO_LIBCALL_RET;
3351 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3352 expected_align, expected_size,
3353 min_size, max_size, probable_max_size,
3354 use_mempcpy_call, &is_move_done,
3355 might_overlap);
3357 /* Bail out when a mempcpy call would be expanded as a libcall and when
3358 the target provides a fast implementation
3359 of the mempcpy routine. */
3360 if (!is_move_done)
3361 return NULL_RTX;
3363 if (dest_addr == pc_rtx)
3364 return NULL_RTX;
3366 if (dest_addr == 0)
3368 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3369 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3372 if (retmode != RETURN_BEGIN && target != const0_rtx)
3374 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3375 /* stpcpy returns a pointer to the last byte. */
3376 if (retmode == RETURN_END_MINUS_ONE)
3377 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3380 return dest_addr;
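/* Illustrative sketch of the constant-source path above: a copy such as

       char buf[4];
       __builtin_memcpy (buf, "abc", 4);

   can be emitted by store_by_pieces as a single 4-byte constant store of
   the bytes 'a' 'b' 'c' '\0' (on targets where such a store is cheap),
   without ever reading the string literal from memory.  */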
3383 static rtx
3384 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3385 rtx target, tree orig_exp, memop_ret retmode)
3387 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3388 retmode, false);
3391 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3392 we failed and the caller should emit a normal call; otherwise try to
3393 get the result in TARGET, if convenient.
3394 Return value is based on RETMODE argument. */
3396 static rtx
3397 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3399 class expand_operand ops[3];
3400 rtx dest_mem;
3401 rtx src_mem;
3403 if (!targetm.have_movstr ())
3404 return NULL_RTX;
3406 dest_mem = get_memory_rtx (dest, NULL);
3407 src_mem = get_memory_rtx (src, NULL);
3408 if (retmode == RETURN_BEGIN)
3410 target = force_reg (Pmode, XEXP (dest_mem, 0));
3411 dest_mem = replace_equiv_address (dest_mem, target);
3414 create_output_operand (&ops[0],
3415 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3416 create_fixed_operand (&ops[1], dest_mem);
3417 create_fixed_operand (&ops[2], src_mem);
3418 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3419 return NULL_RTX;
3421 if (retmode != RETURN_BEGIN && target != const0_rtx)
3423 target = ops[0].value;
3424 /* movstr is supposed to set end to the address of the NUL
3425 terminator. If the caller requested a mempcpy-like return value,
3426 adjust it. */
3427 if (retmode == RETURN_END)
3429 rtx tem = plus_constant (GET_MODE (target),
3430 gen_lowpart (GET_MODE (target), target), 1);
3431 emit_move_insn (target, force_operand (tem, NULL_RTX));
3434 return target;
3437 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3438 NULL_RTX if we failed and the caller should emit a normal call; otherwise
3439 try to get the result in TARGET, if convenient (and in mode MODE if that's
3440 convenient). */
3442 static rtx
3443 expand_builtin_strcpy (tree exp, rtx target)
3445 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3446 return NULL_RTX;
3448 tree dest = CALL_EXPR_ARG (exp, 0);
3449 tree src = CALL_EXPR_ARG (exp, 1);
3451 return expand_builtin_strcpy_args (exp, dest, src, target);
3454 /* Helper function to do the actual work for expand_builtin_strcpy. The
3455 arguments to the builtin_strcpy call DEST and SRC are broken out
3456 so that this can also be called without constructing an actual CALL_EXPR.
3457 The other arguments and return value are the same as for
3458 expand_builtin_strcpy. */
3460 static rtx
3461 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3463 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3466 /* Expand a call EXP to the stpcpy builtin.
3467 Return NULL_RTX if we failed; the caller should then emit a normal call.
3468 Otherwise try to get the result in TARGET, if convenient (and in
3469 mode MODE if that's convenient). */
3471 static rtx
3472 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3474 tree dst, src;
3475 location_t loc = EXPR_LOCATION (exp);
3477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3478 return NULL_RTX;
3480 dst = CALL_EXPR_ARG (exp, 0);
3481 src = CALL_EXPR_ARG (exp, 1);
3483 /* If return value is ignored, transform stpcpy into strcpy. */
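/* E.g. a call such as "stpcpy (d, s);" whose result is unused is
expanded as if it had been written "strcpy (d, s);". */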
3484 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3486 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3487 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3488 return expand_expr (result, target, mode, EXPAND_NORMAL);
3490 else
3492 tree len, lenp1;
3493 rtx ret;
3495 /* Ensure we get an actual string whose length can be evaluated at
3496 compile-time, not an expression containing a string. This is
3497 because the latter will potentially produce pessimized code
3498 when used to produce the return value. */
3499 c_strlen_data lendata = { };
3500 if (!c_getstr (src)
3501 || !(len = c_strlen (src, 0, &lendata, 1)))
3502 return expand_movstr (dst, src, target,
3503 /*retmode=*/ RETURN_END_MINUS_ONE);
3505 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3506 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3507 target, exp,
3508 /*retmode=*/ RETURN_END_MINUS_ONE);
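/* For instance, for stpcpy (d, "abc"), LEN is 3 and LENP1 is 4, so the
call above expands as mempcpy (d, "abc", 4) with RETURN_END_MINUS_ONE,
yielding d + 3, a pointer to the copied nul, as stpcpy requires. */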
3510 if (ret)
3511 return ret;
3513 if (TREE_CODE (len) == INTEGER_CST)
3515 rtx len_rtx = expand_normal (len);
3517 if (CONST_INT_P (len_rtx))
3519 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3521 if (ret)
3523 if (! target)
3525 if (mode != VOIDmode)
3526 target = gen_reg_rtx (mode);
3527 else
3528 target = gen_reg_rtx (GET_MODE (ret));
3530 if (GET_MODE (target) != GET_MODE (ret))
3531 ret = gen_lowpart (GET_MODE (target), ret);
3533 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3534 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3535 gcc_assert (ret);
3537 return target;
3542 return expand_movstr (dst, src, target,
3543 /*retmode=*/ RETURN_END_MINUS_ONE);
3547 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3548 arguments while being careful to avoid duplicate warnings (which could
3549 be issued if the expander were to expand the call, resulting in it
3550 being emitted in expand_call ()). */
3552 static rtx
3553 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3555 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3557 /* The call has been successfully expanded. Check for nonstring
3558 arguments and issue warnings as appropriate. */
3559 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3560 return ret;
3563 return NULL_RTX;
3566 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3567 bytes from constant string DATA + OFFSET and return it as target
3568 constant. */
3570 static rtx
3571 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3572 fixed_size_mode mode)
3574 const char *str = (const char *) data;
3576 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3577 return const0_rtx;
3579 /* The by-pieces infrastructure does not try to pick a vector mode
3580 for strncpy expansion. */
3581 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3584 /* Helper to check the sizes of sequences and the destination of calls
3585 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3586 success (no overflow or invalid sizes), false otherwise. */
3588 static bool
3589 check_strncat_sizes (tree exp, tree objsize)
3591 tree dest = CALL_EXPR_ARG (exp, 0);
3592 tree src = CALL_EXPR_ARG (exp, 1);
3593 tree maxread = CALL_EXPR_ARG (exp, 2);
3595 /* Try to determine the range of lengths that the source expression
3596 refers to. */
3597 c_strlen_data lendata = { };
3598 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3600 /* Try to verify that the destination is big enough for the shortest
3601 string. */
3603 access_data data (exp, access_read_write, maxread, true);
3604 if (!objsize && warn_stringop_overflow)
3606 /* If it hasn't been provided by __strncat_chk, try to determine
3607 the size of the destination object into which the source is
3608 being copied. */
3609 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3612 /* Add one for the terminating nul. */
3613 tree srclen = (lendata.minlen
3614 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3615 size_one_node)
3616 : NULL_TREE);
3618 /* The strncat function copies at most MAXREAD bytes and always appends
3619 the terminating nul so the specified upper bound should never be equal
3620 to (or greater than) the size of the destination. */
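/* A typical instance of this mistake is strncat (d, s, sizeof d),
which leaves no room for the terminating nul when MAXREAD bytes
actually end up being copied. */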
3621 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3622 && tree_int_cst_equal (objsize, maxread))
3624 location_t loc = EXPR_LOCATION (exp);
3625 warning_at (loc, OPT_Wstringop_overflow_,
3626 "%qD specified bound %E equals destination size",
3627 get_callee_fndecl (exp), maxread);
3629 return false;
3632 if (!srclen
3633 || (maxread && tree_fits_uhwi_p (maxread)
3634 && tree_fits_uhwi_p (srclen)
3635 && tree_int_cst_lt (maxread, srclen)))
3636 srclen = maxread;
3638 /* The number of bytes to write is LEN but check_access will also
3639 check SRCLEN if LEN's value isn't known. */
3640 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3641 objsize, data.mode, &data);
3644 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3645 NULL_RTX if we failed; the caller should then emit a normal call. */
3647 static rtx
3648 expand_builtin_strncpy (tree exp, rtx target)
3650 location_t loc = EXPR_LOCATION (exp);
3652 if (!validate_arglist (exp,
3653 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3654 return NULL_RTX;
3655 tree dest = CALL_EXPR_ARG (exp, 0);
3656 tree src = CALL_EXPR_ARG (exp, 1);
3657 /* The number of bytes to write (not the maximum). */
3658 tree len = CALL_EXPR_ARG (exp, 2);
3660 /* The length of the source sequence. */
3661 tree slen = c_strlen (src, 1);
3663 /* We must be passed a constant len and src parameter. */
3664 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3665 return NULL_RTX;
3667 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3669 /* We're required to pad with trailing zeros if the requested
3670 len is greater than strlen(s2)+1. In that case try to
3671 use store_by_pieces; if that fails, punt. */
3672 if (tree_int_cst_lt (slen, len))
3674 unsigned int dest_align = get_pointer_alignment (dest);
3675 const char *p = c_getstr (src);
3676 rtx dest_mem;
3678 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3679 || !can_store_by_pieces (tree_to_uhwi (len),
3680 builtin_strncpy_read_str,
3681 CONST_CAST (char *, p),
3682 dest_align, false))
3683 return NULL_RTX;
3685 dest_mem = get_memory_rtx (dest, len);
3686 store_by_pieces (dest_mem, tree_to_uhwi (len),
3687 builtin_strncpy_read_str,
3688 CONST_CAST (char *, p), dest_align, false,
3689 RETURN_BEGIN);
3690 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3691 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3692 return dest_mem;
3695 return NULL_RTX;
3698 /* Return the RTL of a register in MODE generated from PREV in the
3699 previous iteration. */
3701 static rtx
3702 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3704 rtx target = nullptr;
3705 if (prev != nullptr && prev->data != nullptr)
3707 /* Use the previous data in the same mode. */
3708 if (prev->mode == mode)
3709 return prev->data;
3711 fixed_size_mode prev_mode = prev->mode;
3713 /* Don't use the previous data to write QImode if it is in a
3714 vector mode. */
3715 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3716 return target;
3718 rtx prev_rtx = prev->data;
3720 if (REG_P (prev_rtx)
3721 && HARD_REGISTER_P (prev_rtx)
3722 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3724 /* This case occurs when PREV_MODE is a vector and when
3725 MODE is too small to store using vector operations.
3726 After register allocation, the code will need to move the
3727 lowpart of the vector register into a non-vector register.
3729 Also, the target has chosen to use a hard register
3730 instead of going with the default choice of using a
3731 pseudo register. We should respect that choice and try to
3732 avoid creating a pseudo register with the same mode as the
3733 current hard register.
3735 In principle, we could just use a lowpart MODE subreg of
3736 the vector register. However, the vector register mode might
3737 be too wide for non-vector registers, and we already know
3738 that the non-vector mode is too small for vector registers.
3739 It's therefore likely that we'd need to spill to memory in
3740 the vector mode and reload the non-vector value from there.
3742 Try to avoid that by reducing the vector register to the
3743 smallest size that it can hold. This should increase the
3744 chances that non-vector registers can hold both the inner
3745 and outer modes of the subreg that we generate later. */
3746 machine_mode m;
3747 fixed_size_mode candidate;
3748 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3749 if (is_a<fixed_size_mode> (m, &candidate))
3751 if (GET_MODE_SIZE (candidate)
3752 >= GET_MODE_SIZE (prev_mode))
3753 break;
3754 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3755 && lowpart_subreg_regno (REGNO (prev_rtx),
3756 prev_mode, candidate) >= 0)
3758 target = lowpart_subreg (candidate, prev_rtx,
3759 prev_mode);
3760 prev_rtx = target;
3761 prev_mode = candidate;
3762 break;
3765 if (target == nullptr)
3766 prev_rtx = copy_to_reg (prev_rtx);
3769 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3771 return target;
3774 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3776 constant. If PREV isn't nullptr, it has the RTL info from the
3777 previous iteration. */
3779 static rtx
3780 builtin_memset_read_str (void *data, void *prev,
3781 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3782 fixed_size_mode mode)
3784 const char *c = (const char *) data;
3785 unsigned int size = GET_MODE_SIZE (mode);
3787 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3788 mode);
3789 if (target != nullptr)
3790 return target;
3791 rtx src = gen_int_mode (*c, QImode);
3793 if (VECTOR_MODE_P (mode))
3795 gcc_assert (GET_MODE_INNER (mode) == QImode);
3797 rtx const_vec = gen_const_vec_duplicate (mode, src);
3798 if (prev == NULL)
3799 /* Return CONST_VECTOR when called by a query function. */
3800 return const_vec;
3802 /* Use the move expander with CONST_VECTOR. */
3803 target = targetm.gen_memset_scratch_rtx (mode);
3804 emit_move_insn (target, const_vec);
3805 return target;
3808 char *p = XALLOCAVEC (char, size);
3810 memset (p, *c, size);
3812 /* Vector modes should be handled above. */
3813 return c_readstr (p, as_a <scalar_int_mode> (mode));
3816 /* Callback routine for store_by_pieces. Return the RTL of a register
3817 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3818 char value given in the RTL register data. For example, if mode is
3819 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3820 nullptr, it has the RTL info from the previous iteration. */
3822 static rtx
3823 builtin_memset_gen_str (void *data, void *prev,
3824 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3825 fixed_size_mode mode)
3827 rtx target, coeff;
3828 size_t size;
3829 char *p;
3831 size = GET_MODE_SIZE (mode);
3832 if (size == 1)
3833 return (rtx) data;
3835 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3836 if (target != nullptr)
3837 return target;
3839 if (VECTOR_MODE_P (mode))
3841 gcc_assert (GET_MODE_INNER (mode) == QImode);
3843 /* vec_duplicate_optab is a precondition to pick a vector mode for
3844 the memset expander. */
3845 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3847 target = targetm.gen_memset_scratch_rtx (mode);
3848 class expand_operand ops[2];
3849 create_output_operand (&ops[0], target, mode);
3850 create_input_operand (&ops[1], (rtx) data, QImode);
3851 expand_insn (icode, 2, ops);
3852 if (!rtx_equal_p (target, ops[0].value))
3853 emit_move_insn (target, ops[0].value);
3855 return target;
3858 p = XALLOCAVEC (char, size);
3859 memset (p, 1, size);
3860 /* Vector modes should be handled above. */
3861 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3863 target = convert_to_mode (mode, (rtx) data, 1);
3864 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3865 return force_reg (mode, target);
3868 /* Expand expression EXP, which is a call to the memset builtin. Return
3869 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3870 try to get the result in TARGET, if convenient (and in mode MODE if that's
3871 convenient). */
3873 static rtx
3874 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3876 if (!validate_arglist (exp,
3877 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3878 return NULL_RTX;
3880 tree dest = CALL_EXPR_ARG (exp, 0);
3881 tree val = CALL_EXPR_ARG (exp, 1);
3882 tree len = CALL_EXPR_ARG (exp, 2);
3884 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3887 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3888 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3889 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3890 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3892 The strategy is to issue one store_by_pieces for each power of two,
3893 from most to least significant, guarded by a test on whether there
3894 are at least that many bytes left to copy in LEN.
3896 ??? Should we skip some powers of two in favor of loops? Maybe start
3897 at the max of TO/LEN/word alignment, at least when optimizing for
3898 size, instead of ensuring O(log len) dynamic compares? */
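/* As an illustration (numbers chosen for this comment only): for a
memset whose LEN is known to be a multiple of 4 in the range [60, 64]
and whose destination is 4-byte aligned, the code below issues an
unconditional 4-byte store (the fixed BLKSIZE block), unconditional
stores of 32, 16 and 8 bytes, and finally a 4-byte store guarded by a
test on the remaining length, covering both possible lengths (60 and
64) without a loop. */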
3900 bool
3901 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
3902 unsigned HOST_WIDE_INT min_len,
3903 unsigned HOST_WIDE_INT max_len,
3904 rtx val, char valc, unsigned int align)
3906 int max_bits = floor_log2 (max_len);
3907 int min_bits = floor_log2 (min_len);
3908 int sctz_len = ctz_len;
3910 gcc_checking_assert (sctz_len >= 0);
3912 if (val)
3913 valc = 1;
3915 /* Bits more significant than TST_BITS are part of the shared prefix
3916 in the binary representation of both min_len and max_len. Since
3917 they're identical, we don't need to test them in the loop. */
3918 int tst_bits = (max_bits != min_bits ? max_bits
3919 : floor_log2 (max_len ^ min_len));
3921 /* Check whether it's profitable to start by storing a fixed BLKSIZE
3922 bytes, to lower max_bits. In the unlikely case of a constant LEN
3923 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
3924 single store_by_pieces, but otherwise, select the minimum multiple
3925 of the ALIGN (in bytes) and of the GCD of the possible LENs that
3926 brings MAX_LEN below 1<<TST_BITS, provided it doesn't exceed MIN_LEN. */
3927 unsigned HOST_WIDE_INT blksize;
3928 if (max_len > min_len)
3930 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
3931 align / BITS_PER_UNIT);
3932 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
3933 blksize &= ~(alrng - 1);
3935 else if (max_len == min_len)
3936 blksize = max_len;
3937 else
3938 gcc_unreachable ();
3939 if (min_len >= blksize)
3941 min_len -= blksize;
3942 min_bits = floor_log2 (min_len);
3943 max_len -= blksize;
3944 max_bits = floor_log2 (max_len);
3946 tst_bits = (max_bits != min_bits ? max_bits
3947 : floor_log2 (max_len ^ min_len));
3949 else
3950 blksize = 0;
3952 /* Check that we can use store by pieces for the maximum store count
3953 we may issue (initial fixed-size block, plus conditional
3954 power-of-two-sized stores from max_bits down to ctz_len). */
3955 unsigned HOST_WIDE_INT xlenest = blksize;
3956 if (max_bits >= 0)
3957 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
3958 - (HOST_WIDE_INT_1U << ctz_len));
3959 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
3960 &valc, align, true))
3961 return false;
3963 by_pieces_constfn constfun;
3964 void *constfundata;
3965 if (val)
3967 constfun = builtin_memset_gen_str;
3968 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
3969 val);
3971 else
3973 constfun = builtin_memset_read_str;
3974 constfundata = &valc;
3977 rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0));
3978 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
3979 to = replace_equiv_address (to, ptr);
3980 set_mem_align (to, align);
3982 if (blksize)
3984 to = store_by_pieces (to, blksize,
3985 constfun, constfundata,
3986 align, true,
3987 max_len != 0 ? RETURN_END : RETURN_BEGIN);
3988 if (max_len == 0)
3989 return true;
3991 /* Adjust PTR, TO and REM. Since TO's address is likely
3992 PTR+offset, we have to replace it. */
3993 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
3994 to = replace_equiv_address (to, ptr);
3995 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
3996 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
3999 /* Iterate over power-of-two block sizes from the maximum length to
4000 the least significant bit possibly set in the length. */
4001 for (int i = max_bits; i >= sctz_len; i--)
4003 rtx_code_label *label = NULL;
4004 blksize = HOST_WIDE_INT_1U << i;
4006 /* If we're past the bits shared between min_ and max_len, expand
4007 a test on the dynamic length, comparing it with the
4008 BLKSIZE. */
4009 if (i <= tst_bits)
4011 label = gen_label_rtx ();
4012 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4013 ptr_mode, 1, label,
4014 profile_probability::even ());
4016 /* If we are at a bit that is in the prefix shared by min_ and
4017 max_len, skip this BLKSIZE if the bit is clear. */
4018 else if ((max_len & blksize) == 0)
4019 continue;
4021 /* Issue a store of BLKSIZE bytes. */
4022 to = store_by_pieces (to, blksize,
4023 constfun, constfundata,
4024 align, true,
4025 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4027 /* Adjust REM and PTR, unless this is the last iteration. */
4028 if (i != sctz_len)
4030 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4031 to = replace_equiv_address (to, ptr);
4032 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4033 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4036 if (label)
4038 emit_label (label);
4040 /* Given conditional stores, the offset can no longer be
4041 known, so clear it. */
4042 clear_mem_offset (to);
4046 return true;
4049 /* Helper function to do the actual work for expand_builtin_memset. The
4050 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4051 so that this can also be called without constructing an actual CALL_EXPR.
4052 The other arguments and return value are the same as for
4053 expand_builtin_memset. */
4055 static rtx
4056 expand_builtin_memset_args (tree dest, tree val, tree len,
4057 rtx target, machine_mode mode, tree orig_exp)
4059 tree fndecl, fn;
4060 enum built_in_function fcode;
4061 machine_mode val_mode;
4062 char c;
4063 unsigned int dest_align;
4064 rtx dest_mem, dest_addr, len_rtx;
4065 HOST_WIDE_INT expected_size = -1;
4066 unsigned int expected_align = 0;
4067 unsigned HOST_WIDE_INT min_size;
4068 unsigned HOST_WIDE_INT max_size;
4069 unsigned HOST_WIDE_INT probable_max_size;
4071 dest_align = get_pointer_alignment (dest);
4073 /* If DEST is not a pointer type, don't do this operation in-line. */
4074 if (dest_align == 0)
4075 return NULL_RTX;
4077 if (currently_expanding_gimple_stmt)
4078 stringop_block_profile (currently_expanding_gimple_stmt,
4079 &expected_align, &expected_size);
4081 if (expected_align < dest_align)
4082 expected_align = dest_align;
4084 /* If the LEN parameter is zero, return DEST. */
4085 if (integer_zerop (len))
4087 /* Evaluate and ignore VAL in case it has side-effects. */
4088 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4089 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4092 /* Stabilize the arguments in case we fail. */
4093 dest = builtin_save_expr (dest);
4094 val = builtin_save_expr (val);
4095 len = builtin_save_expr (len);
4097 len_rtx = expand_normal (len);
4098 determine_block_size (len, len_rtx, &min_size, &max_size,
4099 &probable_max_size);
4100 dest_mem = get_memory_rtx (dest, len);
4101 val_mode = TYPE_MODE (unsigned_char_type_node);
4103 if (TREE_CODE (val) != INTEGER_CST
4104 || target_char_cast (val, &c))
4106 rtx val_rtx;
4108 val_rtx = expand_normal (val);
4109 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4111 /* Assume that we can memset by pieces if we can store
4112 the coefficients by pieces (in the required modes).
4113 We can't pass builtin_memset_gen_str as that emits RTL. */
4114 c = 1;
4115 if (tree_fits_uhwi_p (len)
4116 && can_store_by_pieces (tree_to_uhwi (len),
4117 builtin_memset_read_str, &c, dest_align,
4118 true))
4120 val_rtx = force_reg (val_mode, val_rtx);
4121 store_by_pieces (dest_mem, tree_to_uhwi (len),
4122 builtin_memset_gen_str, val_rtx, dest_align,
4123 true, RETURN_BEGIN);
4125 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4126 dest_align, expected_align,
4127 expected_size, min_size, max_size,
4128 probable_max_size)
4129 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4130 tree_ctz (len),
4131 min_size, max_size,
4132 val_rtx, 0,
4133 dest_align))
4134 goto do_libcall;
4136 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4137 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4138 return dest_mem;
4141 if (c)
4143 if (tree_fits_uhwi_p (len)
4144 && can_store_by_pieces (tree_to_uhwi (len),
4145 builtin_memset_read_str, &c, dest_align,
4146 true))
4147 store_by_pieces (dest_mem, tree_to_uhwi (len),
4148 builtin_memset_read_str, &c, dest_align, true,
4149 RETURN_BEGIN);
4150 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4151 gen_int_mode (c, val_mode),
4152 dest_align, expected_align,
4153 expected_size, min_size, max_size,
4154 probable_max_size)
4155 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4156 tree_ctz (len),
4157 min_size, max_size,
4158 NULL_RTX, c,
4159 dest_align))
4160 goto do_libcall;
4162 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4163 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4164 return dest_mem;
4167 set_mem_align (dest_mem, dest_align);
4168 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4169 CALL_EXPR_TAILCALL (orig_exp)
4170 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4171 expected_align, expected_size,
4172 min_size, max_size,
4173 probable_max_size, tree_ctz (len));
4175 if (dest_addr == 0)
4177 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4178 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4181 return dest_addr;
4183 do_libcall:
4184 fndecl = get_callee_fndecl (orig_exp);
4185 fcode = DECL_FUNCTION_CODE (fndecl);
4186 if (fcode == BUILT_IN_MEMSET)
4187 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4188 dest, val, len);
4189 else if (fcode == BUILT_IN_BZERO)
4190 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4191 dest, len);
4192 else
4193 gcc_unreachable ();
4194 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4195 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4196 return expand_call (fn, target, target == const0_rtx);
4199 /* Expand expression EXP, which is a call to the bzero builtin. Return
4200 NULL_RTX if we failed; the caller should then emit a normal call. */
4202 static rtx
4203 expand_builtin_bzero (tree exp)
4205 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4206 return NULL_RTX;
4208 tree dest = CALL_EXPR_ARG (exp, 0);
4209 tree size = CALL_EXPR_ARG (exp, 1);
4211 /* New argument list transforming bzero(ptr x, int y) to
4212 memset(ptr x, int 0, size_t y). This is done this way
4213 so that if it isn't expanded inline, we fall back to
4214 calling bzero instead of memset. */
4216 location_t loc = EXPR_LOCATION (exp);
4218 return expand_builtin_memset_args (dest, integer_zero_node,
4219 fold_convert_loc (loc,
4220 size_type_node, size),
4221 const0_rtx, VOIDmode, exp);
4224 /* Try to expand cmpstr operation ICODE with the given operands.
4225 Return the result rtx on success, otherwise return null. */
4227 static rtx
4228 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4229 HOST_WIDE_INT align)
4231 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4233 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4234 target = NULL_RTX;
4236 class expand_operand ops[4];
4237 create_output_operand (&ops[0], target, insn_mode);
4238 create_fixed_operand (&ops[1], arg1_rtx);
4239 create_fixed_operand (&ops[2], arg2_rtx);
4240 create_integer_operand (&ops[3], align);
4241 if (maybe_expand_insn (icode, 4, ops))
4242 return ops[0].value;
4243 return NULL_RTX;
4246 /* Expand expression EXP, which is a call to the memcmp built-in function.
4247 Return NULL_RTX if we failed and the caller should emit a normal call,
4248 otherwise try to get the result in TARGET, if convenient.
4249 RESULT_EQ is true if we can relax the returned value to be either zero
4250 or nonzero, without caring about the sign. */
4252 static rtx
4253 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4255 if (!validate_arglist (exp,
4256 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX;
4259 tree arg1 = CALL_EXPR_ARG (exp, 0);
4260 tree arg2 = CALL_EXPR_ARG (exp, 1);
4261 tree len = CALL_EXPR_ARG (exp, 2);
4263 /* Diagnose calls where the specified length exceeds the size of either
4264 object. */
4265 if (!check_read_access (exp, arg1, len, 0)
4266 || !check_read_access (exp, arg2, len, 0))
4267 return NULL_RTX;
4269 /* Due to the performance benefit, always inline the calls first
4270 when result_eq is false. */
4271 rtx result = NULL_RTX;
4272 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4273 if (!result_eq && fcode != BUILT_IN_BCMP)
4275 result = inline_expand_builtin_bytecmp (exp, target);
4276 if (result)
4277 return result;
4280 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4281 location_t loc = EXPR_LOCATION (exp);
4283 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4284 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4286 /* If the alignment of either argument is unknown, punt and call the function. */
4287 if (arg1_align == 0 || arg2_align == 0)
4288 return NULL_RTX;
4290 rtx arg1_rtx = get_memory_rtx (arg1, len);
4291 rtx arg2_rtx = get_memory_rtx (arg2, len);
4292 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4294 /* Set MEM_SIZE as appropriate. */
4295 if (CONST_INT_P (len_rtx))
4297 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4298 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4301 by_pieces_constfn constfn = NULL;
4303 /* Try to get the byte representation of the constant that ARG2 (or, only
4304 when the function's result is used just for equality to zero, ARG1)
4305 points to, with its byte size in NBYTES. */
4306 unsigned HOST_WIDE_INT nbytes;
4307 const char *rep = getbyterep (arg2, &nbytes);
4308 if (result_eq && rep == NULL)
4310 /* For equality to zero the arguments are interchangeable. */
4311 rep = getbyterep (arg1, &nbytes);
4312 if (rep != NULL)
4313 std::swap (arg1_rtx, arg2_rtx);
4316 /* If the function's constant bound LEN_RTX is less than or equal
4317 to the byte size of the representation of the constant argument,
4318 and if block move would be done by pieces, we can avoid loading
4319 the bytes from memory and only store the computed constant result. */
4320 if (rep
4321 && CONST_INT_P (len_rtx)
4322 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4323 constfn = builtin_memcpy_read_str;
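/* For example, in memcmp (p, "abcd", 4) the four bytes of "abcd" are
produced directly by the callback instead of being loaded from
memory, so only P's bytes need to be read at run time. */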
4325 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4326 TREE_TYPE (len), target,
4327 result_eq, constfn,
4328 CONST_CAST (char *, rep));
4330 if (result)
4332 /* Return the value in the proper mode for this function. */
4333 if (GET_MODE (result) == mode)
4334 return result;
4336 if (target != 0)
4338 convert_move (target, result, 0);
4339 return target;
4342 return convert_to_mode (mode, result, 0);
4345 return NULL_RTX;
4348 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4349 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
4350 try to get the result in TARGET, if convenient. */
4352 static rtx
4353 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4355 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4356 return NULL_RTX;
4358 tree arg1 = CALL_EXPR_ARG (exp, 0);
4359 tree arg2 = CALL_EXPR_ARG (exp, 1);
4361 /* Due to the performance benefit, always inline the calls first. */
4362 rtx result = NULL_RTX;
4363 result = inline_expand_builtin_bytecmp (exp, target);
4364 if (result)
4365 return result;
4367 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4368 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4369 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4370 return NULL_RTX;
4372 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4373 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4375 /* If the alignment of either argument is unknown, punt and call the function. */
4376 if (arg1_align == 0 || arg2_align == 0)
4377 return NULL_RTX;
4379 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4380 arg1 = builtin_save_expr (arg1);
4381 arg2 = builtin_save_expr (arg2);
4383 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4384 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4386 /* Try to call cmpstrsi. */
4387 if (cmpstr_icode != CODE_FOR_nothing)
4388 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4389 MIN (arg1_align, arg2_align));
4391 /* Try to determine at least one length and call cmpstrnsi. */
4392 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4394 tree len;
4395 rtx arg3_rtx;
4397 tree len1 = c_strlen (arg1, 1);
4398 tree len2 = c_strlen (arg2, 1);
4400 if (len1)
4401 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4402 if (len2)
4403 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4405 /* If we don't have a constant length for the first, use the length
4406 of the second, if we know it. We don't require a constant for
4407 this case; some cost analysis could be done if both are available
4408 but neither is constant. For now, assume they're equally cheap,
4409 unless one has side effects. If both strings have constant lengths,
4410 use the smaller. */
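/* For instance, in strcmp (s, "abc") with S unknown, LEN2 is 4
(strlen ("abc") + 1); comparing at most 4 bytes is safe because the
comparison cannot continue past the literal's terminating nul. */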
4412 if (!len1)
4413 len = len2;
4414 else if (!len2)
4415 len = len1;
4416 else if (TREE_SIDE_EFFECTS (len1))
4417 len = len2;
4418 else if (TREE_SIDE_EFFECTS (len2))
4419 len = len1;
4420 else if (TREE_CODE (len1) != INTEGER_CST)
4421 len = len2;
4422 else if (TREE_CODE (len2) != INTEGER_CST)
4423 len = len1;
4424 else if (tree_int_cst_lt (len1, len2))
4425 len = len1;
4426 else
4427 len = len2;
4429 /* If both arguments have side effects, we cannot optimize. */
4430 if (len && !TREE_SIDE_EFFECTS (len))
4432 arg3_rtx = expand_normal (len);
4433 result = expand_cmpstrn_or_cmpmem
4434 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4435 arg3_rtx, MIN (arg1_align, arg2_align));
4439 tree fndecl = get_callee_fndecl (exp);
4440 if (result)
4442 /* Return the value in the proper mode for this function. */
4443 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4444 if (GET_MODE (result) == mode)
4445 return result;
4446 if (target == 0)
4447 return convert_to_mode (mode, result, 0);
4448 convert_move (target, result, 0);
4449 return target;
4452 /* Expand the library call ourselves using a stabilized argument
4453 list to avoid re-evaluating the function's arguments twice. */
4454 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4455 copy_warning (fn, exp);
4456 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4457 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4458 return expand_call (fn, target, target == const0_rtx);
4461 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4462 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
4463 try to get the result in TARGET, if convenient. */
4465 static rtx
4466 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4467 ATTRIBUTE_UNUSED machine_mode mode)
4469 if (!validate_arglist (exp,
4470 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4471 return NULL_RTX;
4473 tree arg1 = CALL_EXPR_ARG (exp, 0);
4474 tree arg2 = CALL_EXPR_ARG (exp, 1);
4475 tree arg3 = CALL_EXPR_ARG (exp, 2);
4477 location_t loc = EXPR_LOCATION (exp);
4478 tree len1 = c_strlen (arg1, 1);
4479 tree len2 = c_strlen (arg2, 1);
4481 /* Due to the performance benefit, always inline the calls first. */
4482 rtx result = NULL_RTX;
4483 result = inline_expand_builtin_bytecmp (exp, target);
4484 if (result)
4485 return result;
4487 /* If c_strlen can determine an expression for one of the string
4488 lengths, and it doesn't have side effects, then emit cmpstrnsi
4489 using length MIN(strlen(string)+1, arg3). */
4490 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4491 if (cmpstrn_icode == CODE_FOR_nothing)
4492 return NULL_RTX;
4494 tree len;
4496 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4497 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4499 if (len1)
4500 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4501 if (len2)
4502 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4504 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4506 /* If we don't have a constant length for the first, use the length
4507 of the second, if we know it. If neither string is constant length,
4508 use the given length argument. We don't require a constant for
4509 this case; some cost analysis could be done if both are available
4510 but neither is constant. For now, assume they're equally cheap,
4511 unless one has side effects. If both strings have constant lengths,
4512 use the smaller. */
4514 if (!len1 && !len2)
4515 len = len3;
4516 else if (!len1)
4517 len = len2;
4518 else if (!len2)
4519 len = len1;
4520 else if (TREE_SIDE_EFFECTS (len1))
4521 len = len2;
4522 else if (TREE_SIDE_EFFECTS (len2))
4523 len = len1;
4524 else if (TREE_CODE (len1) != INTEGER_CST)
4525 len = len2;
4526 else if (TREE_CODE (len2) != INTEGER_CST)
4527 len = len1;
4528 else if (tree_int_cst_lt (len1, len2))
4529 len = len1;
4530 else
4531 len = len2;
4533 /* If we are not using the given length, we must incorporate it here.
4534 The actual new length parameter will be MIN(len,arg3) in this case. */
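/* For example, strncmp (s, "hello", n) with S unknown compares using
length MIN (6, n), 6 being strlen ("hello") + 1. */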
4535 if (len != len3)
4537 len = fold_convert_loc (loc, sizetype, len);
4538 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4540 rtx arg1_rtx = get_memory_rtx (arg1, len);
4541 rtx arg2_rtx = get_memory_rtx (arg2, len);
4542 rtx arg3_rtx = expand_normal (len);
4543 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4544 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4545 MIN (arg1_align, arg2_align));
4547 tree fndecl = get_callee_fndecl (exp);
4548 if (result)
4550 /* Return the value in the proper mode for this function. */
4551 mode = TYPE_MODE (TREE_TYPE (exp));
4552 if (GET_MODE (result) == mode)
4553 return result;
4554 if (target == 0)
4555 return convert_to_mode (mode, result, 0);
4556 convert_move (target, result, 0);
4557 return target;
4560 /* Expand the library call ourselves using a stabilized argument
4561 list to avoid re-evaluating the function's arguments twice. */
4562 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4563 copy_warning (call, exp);
4564 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4565 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4566 return expand_call (call, target, target == const0_rtx);
4569 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4570 if that's convenient. */
4572 rtx
4573 expand_builtin_saveregs (void)
4575 rtx val;
4576 rtx_insn *seq;
4578 /* Don't do __builtin_saveregs more than once in a function.
4579 Save the result of the first call and reuse it. */
4580 if (saveregs_value != 0)
4581 return saveregs_value;
4583 /* When this function is called, it means that registers must be
4584 saved on entry to this function. So we migrate the call to the
4585 first insn of this function. */
4587 start_sequence ();
4589 /* Do whatever the machine needs done in this case. */
4590 val = targetm.calls.expand_builtin_saveregs ();
4592 seq = get_insns ();
4593 end_sequence ();
4595 saveregs_value = val;
4597 /* Put the insns after the NOTE that starts the function. If this
4598 is inside a start_sequence, make the outer-level insn chain current, so
4599 the code is placed at the start of the function. */
4600 push_topmost_sequence ();
4601 emit_insn_after (seq, entry_of_function ());
4602 pop_topmost_sequence ();
4604 return val;
4607 /* Expand a call to __builtin_next_arg. */
4609 static rtx
4610 expand_builtin_next_arg (void)
4612 /* Checking arguments is already done in fold_builtin_next_arg
4613 that must be called before this function. */
4614 return expand_binop (ptr_mode, add_optab,
4615 crtl->args.internal_arg_pointer,
4616 crtl->args.arg_offset_rtx,
4617 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4620 /* Make it easier for the backends by protecting the valist argument
4621 from multiple evaluations. */
4623 static tree
4624 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4626 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4628 /* The current way of determining the type of valist is completely
4629 bogus. We should have the information on the va builtin instead. */
4630 if (!vatype)
4631 vatype = targetm.fn_abi_va_list (cfun->decl);
4633 if (TREE_CODE (vatype) == ARRAY_TYPE)
4635 if (TREE_SIDE_EFFECTS (valist))
4636 valist = save_expr (valist);
4638 /* For this case, the backends will be expecting a pointer to
4639 vatype, but it's possible we've actually been given an array
4640 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4641 So fix it. */
4642 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4644 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4645 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4648 else
4650 tree pt = build_pointer_type (vatype);
4652 if (! needs_lvalue)
4654 if (! TREE_SIDE_EFFECTS (valist))
4655 return valist;
4657 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4658 TREE_SIDE_EFFECTS (valist) = 1;
4661 if (TREE_SIDE_EFFECTS (valist))
4662 valist = save_expr (valist);
4663 valist = fold_build2_loc (loc, MEM_REF,
4664 vatype, valist, build_int_cst (pt, 0));
4667 return valist;
4670 /* The "standard" definition of va_list is void*. */
4672 tree
4673 std_build_builtin_va_list (void)
4675 return ptr_type_node;
4678 /* The "standard" abi va_list is va_list_type_node. */
4680 tree
4681 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4683 return va_list_type_node;
4686 /* The "standard" type of va_list is va_list_type_node. */
4688 tree
4689 std_canonical_va_list_type (tree type)
4691 tree wtype, htype;
4693 wtype = va_list_type_node;
4694 htype = type;
4696 if (TREE_CODE (wtype) == ARRAY_TYPE)
4698 /* If va_list is an array type, the argument may have decayed
4699 to a pointer type, e.g. by being passed to another function.
4700 In that case, unwrap both types so that we can compare the
4701 underlying records. */
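/* On x86_64, for instance, va_list is an array of one structure
(struct __va_list_tag[1]), so a va_list parameter received from a
caller is seen here as a pointer to that structure. */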
4702 if (TREE_CODE (htype) == ARRAY_TYPE
4703 || POINTER_TYPE_P (htype))
4705 wtype = TREE_TYPE (wtype);
4706 htype = TREE_TYPE (htype);
4709 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4710 return va_list_type_node;
4712 return NULL_TREE;
4715 /* The "standard" implementation of va_start: just assign `nextarg' to
4716 the variable. */
4718 void
4719 std_expand_builtin_va_start (tree valist, rtx nextarg)
4721 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4722 convert_move (va_r, nextarg, 0);
4725 /* Expand EXP, a call to __builtin_va_start. */
4727 static rtx
4728 expand_builtin_va_start (tree exp)
4730 rtx nextarg;
4731 tree valist;
4732 location_t loc = EXPR_LOCATION (exp);
4734 if (call_expr_nargs (exp) < 2)
4736 error_at (loc, "too few arguments to function %<va_start%>");
4737 return const0_rtx;
4740 if (fold_builtin_next_arg (exp, true))
4741 return const0_rtx;
4743 nextarg = expand_builtin_next_arg ();
4744 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4746 if (targetm.expand_builtin_va_start)
4747 targetm.expand_builtin_va_start (valist, nextarg);
4748 else
4749 std_expand_builtin_va_start (valist, nextarg);
4751 return const0_rtx;
4754 /* Expand EXP, a call to __builtin_va_end. */
4756 static rtx
4757 expand_builtin_va_end (tree exp)
4759 tree valist = CALL_EXPR_ARG (exp, 0);
4761 /* Evaluate for side effects, if needed. I hate macros that don't
4762 do that. */
4763 if (TREE_SIDE_EFFECTS (valist))
4764 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4766 return const0_rtx;
4769 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4770 builtin rather than just as an assignment in stdarg.h because of the
4771 nastiness of array-type va_list types. */
4773 static rtx
4774 expand_builtin_va_copy (tree exp)
4776 tree dst, src, t;
4777 location_t loc = EXPR_LOCATION (exp);
4779 dst = CALL_EXPR_ARG (exp, 0);
4780 src = CALL_EXPR_ARG (exp, 1);
4782 dst = stabilize_va_list_loc (loc, dst, 1);
4783 src = stabilize_va_list_loc (loc, src, 0);
4785 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4787 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4789 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4790 TREE_SIDE_EFFECTS (t) = 1;
4791 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4793 else
4795 rtx dstb, srcb, size;
4797 /* Evaluate to pointers. */
4798 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4799 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4800 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4801 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4803 dstb = convert_memory_address (Pmode, dstb);
4804 srcb = convert_memory_address (Pmode, srcb);
4806 /* "Dereference" to BLKmode memories. */
4807 dstb = gen_rtx_MEM (BLKmode, dstb);
4808 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4809 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4810 srcb = gen_rtx_MEM (BLKmode, srcb);
4811 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4812 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4814 /* Copy. */
4815 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4818 return const0_rtx;
4821 /* Expand a call to one of the builtin functions __builtin_frame_address or
4822 __builtin_return_address. */
4824 static rtx
4825 expand_builtin_frame_address (tree fndecl, tree exp)
4827 /* The argument must be a nonnegative integer constant.
4828 It counts the number of frames to scan up the stack.
4829 The value is either the frame pointer value or the return
4830 address saved in that frame. */
4831 if (call_expr_nargs (exp) == 0)
4832 /* Warning about missing arg was already issued. */
4833 return const0_rtx;
4834 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4836 error ("invalid argument to %qD", fndecl);
4837 return const0_rtx;
4839 else
4841 /* Number of frames to scan up the stack. */
4842 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4844 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4846 /* Some ports cannot access arbitrary stack frames. */
4847 if (tem == NULL)
4849 warning (0, "unsupported argument to %qD", fndecl);
4850 return const0_rtx;
4853 if (count)
4855 /* Warn since no effort is made to ensure that any frame
4856 beyond the current one exists or can be safely reached. */
4857 warning (OPT_Wframe_address, "calling %qD with "
4858 "a nonzero argument is unsafe", fndecl);
4861 /* For __builtin_frame_address, return what we've got. */
4862 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4863 return tem;
4865 if (!REG_P (tem)
4866 && ! CONSTANT_P (tem))
4867 tem = copy_addr_to_reg (tem);
4868 return tem;
4872 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4873 failed and the caller should emit a normal call. */
4875 static rtx
4876 expand_builtin_alloca (tree exp)
4878 rtx op0;
4879 rtx result;
4880 unsigned int align;
4881 tree fndecl = get_callee_fndecl (exp);
4882 HOST_WIDE_INT max_size;
4883 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4884 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4885 bool valid_arglist
4886 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4887 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4888 VOID_TYPE)
4889 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4890 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4891 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4893 if (!valid_arglist)
4894 return NULL_RTX;
4896 /* Compute the argument. */
4897 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4899 /* Compute the alignment. */
4900 align = (fcode == BUILT_IN_ALLOCA
4901 ? BIGGEST_ALIGNMENT
4902 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4904 /* Compute the maximum size. */
4905 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4906 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4907 : -1);
4909 /* Allocate the desired space. If the allocation stems from the declaration
4910 of a variable-sized object, it cannot accumulate. */
4911 result
4912 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4913 result = convert_memory_address (ptr_mode, result);
4915 /* Dynamic allocations for variables are recorded during gimplification. */
4916 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
4917 record_dynamic_alloc (exp);
4919 return result;
4922 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
4923 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which
4924 is the STACK_DYNAMIC_OFFSET value. See the motivation for this in the
4925 comment on the handle_builtin_stack_restore function. */
4927 static rtx
4928 expand_asan_emit_allocas_unpoison (tree exp)
4930 tree arg0 = CALL_EXPR_ARG (exp, 0);
4931 tree arg1 = CALL_EXPR_ARG (exp, 1);
4932 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4933 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4934 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
4935 stack_pointer_rtx, NULL_RTX, 0,
4936 OPTAB_LIB_WIDEN);
4937 off = convert_modes (ptr_mode, Pmode, off, 0);
4938 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
4939 OPTAB_LIB_WIDEN);
4940 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4941 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
4942 top, ptr_mode, bot, ptr_mode);
4943 return ret;
4946 /* Expand a call to bswap builtin in EXP.
4947 Return NULL_RTX if a normal call should be emitted rather than expanding the
4948 function in-line. If convenient, the result should be placed in TARGET.
4949 SUBTARGET may be used as the target for computing one of EXP's operands. */
4951 static rtx
4952 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4953 rtx subtarget)
4955 tree arg;
4956 rtx op0;
4958 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4959 return NULL_RTX;
4961 arg = CALL_EXPR_ARG (exp, 0);
4962 op0 = expand_expr (arg,
4963 subtarget && GET_MODE (subtarget) == target_mode
4964 ? subtarget : NULL_RTX,
4965 target_mode, EXPAND_NORMAL);
4966 if (GET_MODE (op0) != target_mode)
4967 op0 = convert_to_mode (target_mode, op0, 1);
4969 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4971 gcc_assert (target);
4973 return convert_to_mode (target_mode, target, 1);
4976 /* Expand a call to a unary builtin in EXP.
4977 Return NULL_RTX if a normal call should be emitted rather than expanding the
4978 function in-line. If convenient, the result should be placed in TARGET.
4979 SUBTARGET may be used as the target for computing one of EXP's operands. */
4981 static rtx
4982 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4983 rtx subtarget, optab op_optab)
4985 rtx op0;
4987 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4988 return NULL_RTX;
4990 /* Compute the argument. */
4991 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4992 (subtarget
4993 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4994 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4995 VOIDmode, EXPAND_NORMAL);
4996 /* Compute op, into TARGET if possible.
4997 Set TARGET to wherever the result comes back. */
4998 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4999 op_optab, op0, target, op_optab != clrsb_optab);
5000 gcc_assert (target);
5002 return convert_to_mode (target_mode, target, 0);
5005 /* Expand a call to __builtin_expect. We just return our argument
5006 as the builtin_expect semantics should have already been handled
5007 by the tree branch prediction pass. */
5009 static rtx
5010 expand_builtin_expect (tree exp, rtx target)
5012 tree arg;
5014 if (call_expr_nargs (exp) < 2)
5015 return const0_rtx;
5016 arg = CALL_EXPR_ARG (exp, 0);
5018 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5019 /* When guessing was done, the hints should be already stripped away. */
5020 gcc_assert (!flag_guess_branch_prob
5021 || optimize == 0 || seen_error ());
5022 return target;
5025 /* Expand a call to __builtin_expect_with_probability. We just return our
5026 argument as the builtin_expect semantics should have already been handled
5027 by the tree branch prediction pass. */
5029 static rtx
5030 expand_builtin_expect_with_probability (tree exp, rtx target)
5032 tree arg;
5034 if (call_expr_nargs (exp) < 3)
5035 return const0_rtx;
5036 arg = CALL_EXPR_ARG (exp, 0);
5038 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5039 /* When guessing was done, the hints should be already stripped away. */
5040 gcc_assert (!flag_guess_branch_prob
5041 || optimize == 0 || seen_error ());
5042 return target;
5046 /* Expand a call to __builtin_assume_aligned. We just return our first
5047 argument as the builtin_assume_aligned semantics should have already
5048 been handled by CCP. */
5050 static rtx
5051 expand_builtin_assume_aligned (tree exp, rtx target)
5053 if (call_expr_nargs (exp) < 2)
5054 return const0_rtx;
5055 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5056 EXPAND_NORMAL);
5057 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5058 && (call_expr_nargs (exp) < 3
5059 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5060 return target;
5063 void
5064 expand_builtin_trap (void)
5066 if (targetm.have_trap ())
5068 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5069 /* For trap insns when not accumulating outgoing args force
5070 REG_ARGS_SIZE note to prevent crossjumping of calls with
5071 different args sizes. */
5072 if (!ACCUMULATE_OUTGOING_ARGS)
5073 add_args_size_note (insn, stack_pointer_delta);
5075 else
5077 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5078 tree call_expr = build_call_expr (fn, 0);
5079 expand_call (call_expr, NULL_RTX, false);
5082 emit_barrier ();
5085 /* Expand a call to __builtin_unreachable. We do nothing except emit
5086 a barrier saying that control flow will not pass here.
5088 It is the responsibility of the program being compiled to ensure
5089 that control flow never reaches __builtin_unreachable. */
5090 static void
5091 expand_builtin_unreachable (void)
5093 emit_barrier ();
5096 /* Expand EXP, a call to fabs, fabsf or fabsl.
5097 Return NULL_RTX if a normal call should be emitted rather than expanding
5098 the function inline. If convenient, the result should be placed
5099 in TARGET. SUBTARGET may be used as the target for computing
5100 the operand. */
5102 static rtx
5103 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5105 machine_mode mode;
5106 tree arg;
5107 rtx op0;
5109 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5110 return NULL_RTX;
5112 arg = CALL_EXPR_ARG (exp, 0);
5113 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5114 mode = TYPE_MODE (TREE_TYPE (arg));
5115 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5116 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5119 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5120 Return NULL if a normal call should be emitted rather than expanding the
5121 function inline. If convenient, the result should be placed in TARGET.
5122 SUBTARGET may be used as the target for computing the operand. */
5124 static rtx
5125 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5127 rtx op0, op1;
5128 tree arg;
5130 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5131 return NULL_RTX;
5133 arg = CALL_EXPR_ARG (exp, 0);
5134 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5136 arg = CALL_EXPR_ARG (exp, 1);
5137 op1 = expand_normal (arg);
5139 return expand_copysign (op0, op1, target);
5142 /* Emit a call to __builtin___clear_cache. */
5144 void
5145 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5147 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5148 BUILTIN_ASM_NAME_PTR
5149 (BUILT_IN_CLEAR_CACHE));
5151 emit_library_call (callee,
5152 LCT_NORMAL, VOIDmode,
5153 convert_memory_address (ptr_mode, begin), ptr_mode,
5154 convert_memory_address (ptr_mode, end), ptr_mode);
5157 /* Emit a call to __builtin___clear_cache, unless the target specifies
5158 it as do-nothing. This function can be used by trampoline
5159 finalizers to duplicate the effects of expanding a call to the
5160 clear_cache builtin. */
5162 void
5163 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5165 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5166 || CONST_INT_P (begin))
5167 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5168 || CONST_INT_P (end)));
5170 if (targetm.have_clear_cache ())
5172 /* We have a "clear_cache" insn, and it will handle everything. */
5173 class expand_operand ops[2];
5175 create_address_operand (&ops[0], begin);
5176 create_address_operand (&ops[1], end);
5178 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5179 return;
5181 else
5183 #ifndef CLEAR_INSN_CACHE
5184 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5185 does nothing. There is no need to call it. Do nothing. */
5186 return;
5187 #endif /* CLEAR_INSN_CACHE */
5190 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5193 /* Expand a call to __builtin___clear_cache. */
5195 static void
5196 expand_builtin___clear_cache (tree exp)
5198 tree begin, end;
5199 rtx begin_rtx, end_rtx;
5201 /* We must not expand to a library call. If we did, any
5202 fallback library function in libgcc that might contain a call to
5203 __builtin___clear_cache() would recurse infinitely. */
5204 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5206 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5207 return;
5210 begin = CALL_EXPR_ARG (exp, 0);
5211 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5213 end = CALL_EXPR_ARG (exp, 1);
5214 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5216 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5219 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5221 static rtx
5222 round_trampoline_addr (rtx tramp)
5224 rtx temp, addend, mask;
5226 /* If we don't need too much alignment, we'll have been guaranteed
5227 proper alignment by get_trampoline_type. */
5228 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5229 return tramp;
5231 /* Round address up to desired boundary. */
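/* I.e. compute (TRAMP + UNITS - 1) & -UNITS, where UNITS is
TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, using the ADDEND and MASK
constants built below. */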
5232 temp = gen_reg_rtx (Pmode);
5233 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5234 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5236 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5237 temp, 0, OPTAB_LIB_WIDEN);
5238 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5239 temp, 0, OPTAB_LIB_WIDEN);
5241 return tramp;
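/* Worked example (illustrative): with a TRAMPOLINE_ALIGNMENT of 64 bits,
   ADDEND is 7 and MASK is -8, so an address such as 0x1005 rounds up as
   (0x1005 + 7) & -8 == 0x1008, the next 8-byte boundary.  */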
5244 static rtx
5245 expand_builtin_init_trampoline (tree exp, bool onstack)
5247 tree t_tramp, t_func, t_chain;
5248 rtx m_tramp, r_tramp, r_chain, tmp;
5250 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5251 POINTER_TYPE, VOID_TYPE))
5252 return NULL_RTX;
5254 t_tramp = CALL_EXPR_ARG (exp, 0);
5255 t_func = CALL_EXPR_ARG (exp, 1);
5256 t_chain = CALL_EXPR_ARG (exp, 2);
5258 r_tramp = expand_normal (t_tramp);
5259 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5260 MEM_NOTRAP_P (m_tramp) = 1;
5262 /* If ONSTACK, the TRAMP argument should be the address of a field
5263 within the local function's FRAME decl. Either way, let's see if
5264 we can fill in the MEM_ATTRs for this memory. */
5265 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5266 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5268 /* Creator of a heap trampoline is responsible for making sure the
5269 address is aligned to at least STACK_BOUNDARY. Normally malloc
5270 will ensure this anyhow. */
5271 tmp = round_trampoline_addr (r_tramp);
5272 if (tmp != r_tramp)
5274 m_tramp = change_address (m_tramp, BLKmode, tmp);
5275 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5276 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5279 /* The FUNC argument should be the address of the nested function.
5280 Extract the actual function decl to pass to the hook. */
5281 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5282 t_func = TREE_OPERAND (t_func, 0);
5283 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5285 r_chain = expand_normal (t_chain);
5287 /* Generate insns to initialize the trampoline. */
5288 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5290 if (onstack)
5292 trampolines_created = 1;
5294 if (targetm.calls.custom_function_descriptors != 0)
5295 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5296 "trampoline generated for nested function %qD", t_func);
5299 return const0_rtx;
5302 static rtx
5303 expand_builtin_adjust_trampoline (tree exp)
5305 rtx tramp;
5307 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5308 return NULL_RTX;
5310 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5311 tramp = round_trampoline_addr (tramp);
5312 if (targetm.calls.trampoline_adjust_address)
5313 tramp = targetm.calls.trampoline_adjust_address (tramp);
5315 return tramp;
5318 /* Expand a call to the builtin descriptor initialization routine.
5319 A descriptor is made up of a couple of pointers to the static
5320 chain and the code entry in this order. */
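/* Sketch of the resulting layout, assuming 64-bit pointers:

     descr[0] : static chain value   (offset 0)
     descr[1] : code entry address   (offset POINTER_SIZE / BITS_PER_UNIT)

   matching the two emit_move_insn calls in the body below.  */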
5322 static rtx
5323 expand_builtin_init_descriptor (tree exp)
5325 tree t_descr, t_func, t_chain;
5326 rtx m_descr, r_descr, r_func, r_chain;
5328 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5329 VOID_TYPE))
5330 return NULL_RTX;
5332 t_descr = CALL_EXPR_ARG (exp, 0);
5333 t_func = CALL_EXPR_ARG (exp, 1);
5334 t_chain = CALL_EXPR_ARG (exp, 2);
5336 r_descr = expand_normal (t_descr);
5337 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5338 MEM_NOTRAP_P (m_descr) = 1;
5339 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5341 r_func = expand_normal (t_func);
5342 r_chain = expand_normal (t_chain);
5344 /* Generate insns to initialize the descriptor. */
5345 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5346 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5347 POINTER_SIZE / BITS_PER_UNIT), r_func);
5349 return const0_rtx;
5352 /* Expand a call to the builtin descriptor adjustment routine. */
5354 static rtx
5355 expand_builtin_adjust_descriptor (tree exp)
5357 rtx tramp;
5359 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5360 return NULL_RTX;
5362 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5364 /* Unalign the descriptor to allow runtime identification. */
5365 tramp = plus_constant (ptr_mode, tramp,
5366 targetm.calls.custom_function_descriptors);
5368 return force_operand (tramp, NULL_RTX);
5371 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5372 function. The function first checks whether the back end provides
5373 an insn to implement signbit for the respective mode. If not, it
5374 checks whether the floating point format of the value is such that
5375 the sign bit can be extracted. If that is not the case, error out.
5376 EXP is the expression that is a call to the builtin function; if
5377 convenient, the result should be placed in TARGET. */
5378 static rtx
5379 expand_builtin_signbit (tree exp, rtx target)
5381 const struct real_format *fmt;
5382 scalar_float_mode fmode;
5383 scalar_int_mode rmode, imode;
5384 tree arg;
5385 int word, bitpos;
5386 enum insn_code icode;
5387 rtx temp;
5388 location_t loc = EXPR_LOCATION (exp);
5390 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5391 return NULL_RTX;
5393 arg = CALL_EXPR_ARG (exp, 0);
5394 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5395 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5396 fmt = REAL_MODE_FORMAT (fmode);
5398 arg = builtin_save_expr (arg);
5400 /* Expand the argument yielding a RTX expression. */
5401 temp = expand_normal (arg);
5403 /* Check if the back end provides an insn that handles signbit for the
5404 argument's mode. */
5405 icode = optab_handler (signbit_optab, fmode);
5406 if (icode != CODE_FOR_nothing)
5408 rtx_insn *last = get_last_insn ();
5409 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5410 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5411 return target;
5412 delete_insns_since (last);
5415 /* For floating point formats without a sign bit, implement signbit
5416 as "ARG < 0.0". */
5417 bitpos = fmt->signbit_ro;
5418 if (bitpos < 0)
5420 /* But we can't do this if the format supports signed zero. */
5421 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5423 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5424 build_real (TREE_TYPE (arg), dconst0));
5425 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5428 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5430 imode = int_mode_for_mode (fmode).require ();
5431 temp = gen_lowpart (imode, temp);
5433 else
5435 imode = word_mode;
5436 /* Handle targets with different FP word orders. */
5437 if (FLOAT_WORDS_BIG_ENDIAN)
5438 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5439 else
5440 word = bitpos / BITS_PER_WORD;
5441 temp = operand_subword_force (temp, word, fmode);
5442 bitpos = bitpos % BITS_PER_WORD;
5445 /* Force the intermediate word_mode (or narrower) result into a
5446 register. This avoids attempting to create paradoxical SUBREGs
5447 of floating point modes below. */
5448 temp = force_reg (imode, temp);
5450 /* If the bitpos is within the "result mode" lowpart, the operation
5451 can be implemented with a single bitwise AND. Otherwise, we need
5452 a right shift and an AND. */
5454 if (bitpos < GET_MODE_BITSIZE (rmode))
5456 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5458 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5459 temp = gen_lowpart (rmode, temp);
5460 temp = expand_binop (rmode, and_optab, temp,
5461 immed_wide_int_const (mask, rmode),
5462 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5464 else
5466 /* Perform a logical right shift to place the signbit in the least
5467 significant bit, then truncate the result to the desired mode
5468 and mask just this bit. */
5469 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5470 temp = gen_lowpart (rmode, temp);
5471 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5472 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5475 return temp;
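/* Worked example (illustrative): for IEEE single precision the sign bit is
   bit 31, so with a 32-bit int result the value is viewed in SImode and
   masked with 0x80000000; for double on a 64-bit target the bit position
   (63) exceeds the result precision, so the shift-then-AND path is used.  */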
5478 /* Expand fork or exec calls. TARGET is the desired target of the
5479 call. EXP is the call. FN is the
5480 identifier of the actual function. IGNORE is nonzero if the
5481 value is to be ignored. */
5483 static rtx
5484 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5486 tree id, decl;
5487 tree call;
5489 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
5491 tree path = CALL_EXPR_ARG (exp, 0);
5492 /* Detect unterminated path. */
5493 if (!check_read_access (exp, path))
5494 return NULL_RTX;
5496 /* Also detect unterminated first argument. */
5497 switch (DECL_FUNCTION_CODE (fn))
5499 case BUILT_IN_EXECL:
5500 case BUILT_IN_EXECLE:
5501 case BUILT_IN_EXECLP:
5502 if (!check_read_access (exp, path))
5503 return NULL_RTX;
5504 default:
5505 break;
5510 /* If we are not profiling, just call the function. */
5511 if (!profile_arc_flag)
5512 return NULL_RTX;
5514 /* Otherwise call the wrapper. This should be equivalent for the rest of
5515 the compiler, so the code does not diverge, and the wrapper may run the
5516 code necessary for keeping the profiling sane. */
5518 switch (DECL_FUNCTION_CODE (fn))
5520 case BUILT_IN_FORK:
5521 id = get_identifier ("__gcov_fork");
5522 break;
5524 case BUILT_IN_EXECL:
5525 id = get_identifier ("__gcov_execl");
5526 break;
5528 case BUILT_IN_EXECV:
5529 id = get_identifier ("__gcov_execv");
5530 break;
5532 case BUILT_IN_EXECLP:
5533 id = get_identifier ("__gcov_execlp");
5534 break;
5536 case BUILT_IN_EXECLE:
5537 id = get_identifier ("__gcov_execle");
5538 break;
5540 case BUILT_IN_EXECVP:
5541 id = get_identifier ("__gcov_execvp");
5542 break;
5544 case BUILT_IN_EXECVE:
5545 id = get_identifier ("__gcov_execve");
5546 break;
5548 default:
5549 gcc_unreachable ();
5552 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5553 FUNCTION_DECL, id, TREE_TYPE (fn));
5554 DECL_EXTERNAL (decl) = 1;
5555 TREE_PUBLIC (decl) = 1;
5556 DECL_ARTIFICIAL (decl) = 1;
5557 TREE_NOTHROW (decl) = 1;
5558 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5559 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5560 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5561 return expand_call (call, target, ignore);
5566 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5567 the pointer in these functions is void*, the tree optimizers may remove
5568 casts. The mode computed in expand_builtin isn't reliable either, due
5569 to __sync_bool_compare_and_swap.
5571 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5572 group of builtins. This gives us log2 of the mode size. */
5574 static inline machine_mode
5575 get_builtin_sync_mode (int fcode_diff)
5577 /* The size is not negotiable, so ask not to get BLKmode in return
5578 if the target indicates that a smaller size would be better. */
5579 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
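/* E.g. for the __sync_*_4 entry points FCODE_DIFF is 2, giving
   8 << 2 == 32 bits, i.e. SImode on typical targets.  */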
5582 /* Expand the memory expression LOC and return the appropriate memory operand
5583 for the builtin_sync operations. */
5585 static rtx
5586 get_builtin_sync_mem (tree loc, machine_mode mode)
5588 rtx addr, mem;
5589 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5590 ? TREE_TYPE (TREE_TYPE (loc))
5591 : TREE_TYPE (loc));
5592 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5594 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5595 addr = convert_memory_address (addr_mode, addr);
5597 /* Note that we explicitly do not want any alias information for this
5598 memory, so that we kill all other live memories. Otherwise we don't
5599 satisfy the full barrier semantics of the intrinsic. */
5600 mem = gen_rtx_MEM (mode, addr);
5602 set_mem_addr_space (mem, addr_space);
5604 mem = validize_mem (mem);
5606 /* The alignment needs to be at least that of the mode. */
5607 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5608 get_pointer_alignment (loc)));
5609 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5610 MEM_VOLATILE_P (mem) = 1;
5612 return mem;
5615 /* Make sure an argument is in the right mode.
5616 EXP is the tree argument.
5617 MODE is the mode it should be in. */
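/* Illustrative case: a __sync_fetch_and_add on a short object has its value
   operand promoted to int in GIMPLE; if the SSA definition is a widening
   cast from the original short, the cast is peeled off below so the HImode
   sync pattern sees the narrow value directly.  */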
5619 static rtx
5620 expand_expr_force_mode (tree exp, machine_mode mode)
5622 rtx val;
5623 machine_mode old_mode;
5625 if (TREE_CODE (exp) == SSA_NAME
5626 && TYPE_MODE (TREE_TYPE (exp)) != mode)
5628 /* Undo argument promotion if possible, as combine might not
5629 be able to do it later due to MEM_VOLATILE_P uses in the
5630 patterns. */
5631 gimple *g = get_gimple_for_ssa_name (exp);
5632 if (g && gimple_assign_cast_p (g))
5634 tree rhs = gimple_assign_rhs1 (g);
5635 tree_code code = gimple_assign_rhs_code (g);
5636 if (CONVERT_EXPR_CODE_P (code)
5637 && TYPE_MODE (TREE_TYPE (rhs)) == mode
5638 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5639 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5640 && (TYPE_PRECISION (TREE_TYPE (exp))
5641 > TYPE_PRECISION (TREE_TYPE (rhs))))
5642 exp = rhs;
5646 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5647 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5648 of CONST_INTs, where we know the old_mode only from the call argument. */
5650 old_mode = GET_MODE (val);
5651 if (old_mode == VOIDmode)
5652 old_mode = TYPE_MODE (TREE_TYPE (exp));
5653 val = convert_modes (mode, old_mode, val, 1);
5654 return val;
5658 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5659 EXP is the CALL_EXPR. CODE is the rtx code
5660 that corresponds to the arithmetic or logical operation from the name;
5661 an exception here is that NOT actually means NAND. TARGET is an optional
5662 place for us to store the results; AFTER is true if this is the
5663 fetch_and_xxx form. */
5665 static rtx
5666 expand_builtin_sync_operation (machine_mode mode, tree exp,
5667 enum rtx_code code, bool after,
5668 rtx target)
5670 rtx val, mem;
5671 location_t loc = EXPR_LOCATION (exp);
5673 if (code == NOT && warn_sync_nand)
5675 tree fndecl = get_callee_fndecl (exp);
5676 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5678 static bool warned_f_a_n, warned_n_a_f;
5680 switch (fcode)
5682 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5683 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5684 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5685 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5686 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5687 if (warned_f_a_n)
5688 break;
5690 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5691 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5692 warned_f_a_n = true;
5693 break;
5695 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5696 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5697 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5698 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5699 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5700 if (warned_n_a_f)
5701 break;
5703 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5704 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5705 warned_n_a_f = true;
5706 break;
5708 default:
5709 gcc_unreachable ();
5713 /* Expand the operands. */
5714 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5715 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5717 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5718 after);
5721 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5722 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5723 true if this is the boolean form. TARGET is a place for us to store the
5724 results; this is NOT optional if IS_BOOL is true. */
5726 static rtx
5727 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5728 bool is_bool, rtx target)
5730 rtx old_val, new_val, mem;
5731 rtx *pbool, *poval;
5733 /* Expand the operands. */
5734 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5735 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5736 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5738 pbool = poval = NULL;
5739 if (target != const0_rtx)
5741 if (is_bool)
5742 pbool = &target;
5743 else
5744 poval = &target;
5746 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5747 false, MEMMODEL_SYNC_SEQ_CST,
5748 MEMMODEL_SYNC_SEQ_CST))
5749 return NULL_RTX;
5751 return target;
5754 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5755 general form is actually an atomic exchange, and some targets only
5756 support a reduced form with the second argument being a constant 1.
5757 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5758 the results. */
5760 static rtx
5761 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5762 rtx target)
5764 rtx val, mem;
5766 /* Expand the operands. */
5767 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5768 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5770 return expand_sync_lock_test_and_set (target, mem, val);
5773 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5775 static void
5776 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5778 rtx mem;
5780 /* Expand the operands. */
5781 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5783 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5786 /* Given an integer representing an ``enum memmodel'', verify its
5787 correctness and return the memory model enum. */
5789 static enum memmodel
5790 get_memmodel (tree exp)
5792 rtx op;
5793 unsigned HOST_WIDE_INT val;
5794 location_t loc
5795 = expansion_point_location_if_in_system_header (input_location);
5797 /* If the parameter is not a constant, it's a run time value so we'll just
5798 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5799 if (TREE_CODE (exp) != INTEGER_CST)
5800 return MEMMODEL_SEQ_CST;
5802 op = expand_normal (exp);
5804 val = INTVAL (op);
5805 if (targetm.memmodel_check)
5806 val = targetm.memmodel_check (val);
5807 else if (val & ~MEMMODEL_MASK)
5809 warning_at (loc, OPT_Winvalid_memory_model,
5810 "unknown architecture specifier in memory model to builtin");
5811 return MEMMODEL_SEQ_CST;
5814 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5815 if (memmodel_base (val) >= MEMMODEL_LAST)
5817 warning_at (loc, OPT_Winvalid_memory_model,
5818 "invalid memory model argument to builtin");
5819 return MEMMODEL_SEQ_CST;
5822 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5823 be conservative and promote consume to acquire. */
5824 if (val == MEMMODEL_CONSUME)
5825 val = MEMMODEL_ACQUIRE;
5827 return (enum memmodel) val;
5830 /* Expand the __atomic_exchange intrinsic:
5831 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5832 EXP is the CALL_EXPR.
5833 TARGET is an optional place for us to store the results. */
5835 static rtx
5836 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5838 rtx val, mem;
5839 enum memmodel model;
5841 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5843 if (!flag_inline_atomics)
5844 return NULL_RTX;
5846 /* Expand the operands. */
5847 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5848 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5850 return expand_atomic_exchange (target, mem, val, model);
5853 /* Expand the __atomic_compare_exchange intrinsic:
5854 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5855 TYPE desired, BOOL weak,
5856 enum memmodel success,
5857 enum memmodel failure)
5858 EXP is the CALL_EXPR.
5859 TARGET is an optional place for us to store the results. */
5861 static rtx
5862 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5863 rtx target)
5865 rtx expect, desired, mem, oldval;
5866 rtx_code_label *label;
5867 enum memmodel success, failure;
5868 tree weak;
5869 bool is_weak;
5870 location_t loc
5871 = expansion_point_location_if_in_system_header (input_location);
5873 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5874 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5876 if (failure > success)
5878 warning_at (loc, OPT_Winvalid_memory_model,
5879 "failure memory model cannot be stronger than success "
5880 "memory model for %<__atomic_compare_exchange%>");
5881 success = MEMMODEL_SEQ_CST;
5884 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5886 warning_at (loc, OPT_Winvalid_memory_model,
5887 "invalid failure memory model for "
5888 "%<__atomic_compare_exchange%>");
5889 failure = MEMMODEL_SEQ_CST;
5890 success = MEMMODEL_SEQ_CST;
5894 if (!flag_inline_atomics)
5895 return NULL_RTX;
5897 /* Expand the operands. */
5898 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5900 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5901 expect = convert_memory_address (Pmode, expect);
5902 expect = gen_rtx_MEM (mode, expect);
5903 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5905 weak = CALL_EXPR_ARG (exp, 3);
5906 is_weak = false;
5907 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5908 is_weak = true;
5910 if (target == const0_rtx)
5911 target = NULL;
5913 /* Lest the rtl backend create a race condition with an improper store
5914 to memory, always create a new pseudo for OLDVAL. */
5915 oldval = NULL;
5917 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5918 is_weak, success, failure))
5919 return NULL_RTX;
5921 /* Conditionally store back to EXPECT, lest we create a race condition
5922 with an improper store to memory. */
5923 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5924 the normal case where EXPECT is totally private, i.e. a register. At
5925 which point the store can be unconditional. */
5926 label = gen_label_rtx ();
5927 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5928 GET_MODE (target), 1, label);
5929 emit_move_insn (expect, oldval);
5930 emit_label (label);
5932 return target;
5935 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5936 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5937 call. The weak parameter must be dropped to match the expected parameter
5938 list and the expected argument changed from value to pointer to memory
5939 slot. */
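/* Sketch of the rewrite for a 4-byte operand (names are illustrative):

     .ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, 4, succ, fail)

   becomes a call to

     __atomic_compare_exchange_4 (ptr, &tmp, desired, succ, fail)

   where TMP is a stack temporary initialized with EXPECTED and the weak
   flag has been dropped.  */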
5941 static void
5942 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5944 unsigned int z;
5945 vec<tree, va_gc> *vec;
5947 vec_alloc (vec, 5);
5948 vec->quick_push (gimple_call_arg (call, 0));
5949 tree expected = gimple_call_arg (call, 1);
5950 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5951 TREE_TYPE (expected));
5952 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5953 if (expd != x)
5954 emit_move_insn (x, expd);
5955 tree v = make_tree (TREE_TYPE (expected), x);
5956 vec->quick_push (build1 (ADDR_EXPR,
5957 build_pointer_type (TREE_TYPE (expected)), v));
5958 vec->quick_push (gimple_call_arg (call, 2));
5959 /* Skip the boolean weak parameter. */
5960 for (z = 4; z < 6; z++)
5961 vec->quick_push (gimple_call_arg (call, z));
5962 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5963 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5964 gcc_assert (bytes_log2 < 5);
5965 built_in_function fncode
5966 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5967 + bytes_log2);
5968 tree fndecl = builtin_decl_explicit (fncode);
5969 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5970 fndecl);
5971 tree exp = build_call_vec (boolean_type_node, fn, vec);
5972 tree lhs = gimple_call_lhs (call);
5973 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5974 if (lhs)
5976 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5977 if (GET_MODE (boolret) != mode)
5978 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5979 x = force_reg (mode, x);
5980 write_complex_part (target, boolret, true);
5981 write_complex_part (target, x, false);
5985 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
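/* Argument 3 packs the access size in its low byte and the weak flag in
   bit 8; e.g. a weak 8-byte compare-exchange passes 8 | 256 == 264.  */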
5987 void
5988 expand_ifn_atomic_compare_exchange (gcall *call)
5990 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5991 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5992 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5993 rtx expect, desired, mem, oldval, boolret;
5994 enum memmodel success, failure;
5995 tree lhs;
5996 bool is_weak;
5997 location_t loc
5998 = expansion_point_location_if_in_system_header (gimple_location (call));
6000 success = get_memmodel (gimple_call_arg (call, 4));
6001 failure = get_memmodel (gimple_call_arg (call, 5));
6003 if (failure > success)
6005 warning_at (loc, OPT_Winvalid_memory_model,
6006 "failure memory model cannot be stronger than success "
6007 "memory model for %<__atomic_compare_exchange%>");
6008 success = MEMMODEL_SEQ_CST;
6011 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6013 warning_at (loc, OPT_Winvalid_memory_model,
6014 "invalid failure memory model for "
6015 "%<__atomic_compare_exchange%>");
6016 failure = MEMMODEL_SEQ_CST;
6017 success = MEMMODEL_SEQ_CST;
6020 if (!flag_inline_atomics)
6022 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6023 return;
6026 /* Expand the operands. */
6027 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6029 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6030 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6032 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6034 boolret = NULL;
6035 oldval = NULL;
6037 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6038 is_weak, success, failure))
6040 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6041 return;
6044 lhs = gimple_call_lhs (call);
6045 if (lhs)
6047 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6048 if (GET_MODE (boolret) != mode)
6049 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6050 write_complex_part (target, boolret, true);
6051 write_complex_part (target, oldval, false);
6055 /* Expand the __atomic_load intrinsic:
6056 TYPE __atomic_load (TYPE *object, enum memmodel)
6057 EXP is the CALL_EXPR.
6058 TARGET is an optional place for us to store the results. */
6060 static rtx
6061 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6063 rtx mem;
6064 enum memmodel model;
6066 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6067 if (is_mm_release (model) || is_mm_acq_rel (model))
6069 location_t loc
6070 = expansion_point_location_if_in_system_header (input_location);
6071 warning_at (loc, OPT_Winvalid_memory_model,
6072 "invalid memory model for %<__atomic_load%>");
6073 model = MEMMODEL_SEQ_CST;
6076 if (!flag_inline_atomics)
6077 return NULL_RTX;
6079 /* Expand the operand. */
6080 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6082 return expand_atomic_load (target, mem, model);
6086 /* Expand the __atomic_store intrinsic:
6087 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6088 EXP is the CALL_EXPR.
6089 TARGET is an optional place for us to store the results. */
6091 static rtx
6092 expand_builtin_atomic_store (machine_mode mode, tree exp)
6094 rtx mem, val;
6095 enum memmodel model;
6097 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6098 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6099 || is_mm_release (model)))
6101 location_t loc
6102 = expansion_point_location_if_in_system_header (input_location);
6103 warning_at (loc, OPT_Winvalid_memory_model,
6104 "invalid memory model for %<__atomic_store%>");
6105 model = MEMMODEL_SEQ_CST;
6108 if (!flag_inline_atomics)
6109 return NULL_RTX;
6111 /* Expand the operands. */
6112 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6113 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6115 return expand_atomic_store (mem, val, model, false);
6118 /* Expand the __atomic_fetch_XXX intrinsic:
6119 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6120 EXP is the CALL_EXPR.
6121 TARGET is an optional place for us to store the results.
6122 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6123 FETCH_AFTER is true if returning the result of the operation.
6124 FETCH_AFTER is false if returning the value before the operation.
6125 IGNORE is true if the result is not used.
6126 EXT_CALL is the correct builtin for an external call if this cannot be
6127 resolved to an instruction sequence. */
6129 static rtx
6130 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6131 enum rtx_code code, bool fetch_after,
6132 bool ignore, enum built_in_function ext_call)
6134 rtx val, mem, ret;
6135 enum memmodel model;
6136 tree fndecl;
6137 tree addr;
6139 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6141 /* Expand the operands. */
6142 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6143 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6145 /* Only try generating instructions if inlining is turned on. */
6146 if (flag_inline_atomics)
6148 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6149 if (ret)
6150 return ret;
6153 /* Return if a different routine isn't needed for the library call. */
6154 if (ext_call == BUILT_IN_NONE)
6155 return NULL_RTX;
6157 /* Change the call to the specified function. */
6158 fndecl = get_callee_fndecl (exp);
6159 addr = CALL_EXPR_FN (exp);
6160 STRIP_NOPS (addr);
6162 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6163 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6165 /* If we will emit code after the call, the call cannot be a tail call.
6166 If it is emitted as a tail call, a barrier is emitted after it, and
6167 then all trailing code is removed. */
6168 if (!ignore)
6169 CALL_EXPR_TAILCALL (exp) = 0;
6171 /* Expand the call here so we can emit trailing code. */
6172 ret = expand_call (exp, target, ignore);
6174 /* Replace the original function just in case it matters. */
6175 TREE_OPERAND (addr, 0) = fndecl;
6177 /* Then issue the arithmetic correction to return the right result. */
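/* E.g. an __atomic_add_fetch that falls back to the __atomic_fetch_add
   library routine gets VAL added back to the returned (old) value, and the
   NAND case is reconstructed as ~(ret & val) below.  */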
6178 if (!ignore)
6180 if (code == NOT)
6182 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6183 OPTAB_LIB_WIDEN);
6184 ret = expand_simple_unop (mode, NOT, ret, target, true);
6186 else
6187 ret = expand_simple_binop (mode, code, ret, val, target, true,
6188 OPTAB_LIB_WIDEN);
6190 return ret;
6193 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
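/* These internal functions are typically created when a pattern such as
     __atomic_fetch_or (ptr, 1 << bit, model) & (1 << bit)
   has been recognized earlier; if no direct atomic_bit_test_and_* insn
   exists, the fallback below performs the plain atomic fetch-op and then
   shifts/masks the result.  */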
6195 void
6196 expand_ifn_atomic_bit_test_and (gcall *call)
6198 tree ptr = gimple_call_arg (call, 0);
6199 tree bit = gimple_call_arg (call, 1);
6200 tree flag = gimple_call_arg (call, 2);
6201 tree lhs = gimple_call_lhs (call);
6202 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6203 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6204 enum rtx_code code;
6205 optab optab;
6206 class expand_operand ops[5];
6208 gcc_assert (flag_inline_atomics);
6210 if (gimple_call_num_args (call) == 4)
6211 model = get_memmodel (gimple_call_arg (call, 3));
6213 rtx mem = get_builtin_sync_mem (ptr, mode);
6214 rtx val = expand_expr_force_mode (bit, mode);
6216 switch (gimple_call_internal_fn (call))
6218 case IFN_ATOMIC_BIT_TEST_AND_SET:
6219 code = IOR;
6220 optab = atomic_bit_test_and_set_optab;
6221 break;
6222 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6223 code = XOR;
6224 optab = atomic_bit_test_and_complement_optab;
6225 break;
6226 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6227 code = AND;
6228 optab = atomic_bit_test_and_reset_optab;
6229 break;
6230 default:
6231 gcc_unreachable ();
6234 if (lhs == NULL_TREE)
6236 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6237 val, NULL_RTX, true, OPTAB_DIRECT);
6238 if (code == AND)
6239 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6240 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6241 return;
6244 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6245 enum insn_code icode = direct_optab_handler (optab, mode);
6246 gcc_assert (icode != CODE_FOR_nothing);
6247 create_output_operand (&ops[0], target, mode);
6248 create_fixed_operand (&ops[1], mem);
6249 create_convert_operand_to (&ops[2], val, mode, true);
6250 create_integer_operand (&ops[3], model);
6251 create_integer_operand (&ops[4], integer_onep (flag));
6252 if (maybe_expand_insn (icode, 5, ops))
6253 return;
6255 rtx bitval = val;
6256 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6257 val, NULL_RTX, true, OPTAB_DIRECT);
6258 rtx maskval = val;
6259 if (code == AND)
6260 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6261 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6262 code, model, false);
6263 if (integer_onep (flag))
6265 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6266 NULL_RTX, true, OPTAB_DIRECT);
6267 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6268 true, OPTAB_DIRECT);
6270 else
6271 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6272 OPTAB_DIRECT);
6273 if (result != target)
6274 emit_move_insn (target, result);
6277 /* Expand an atomic clear operation.
6278 void _atomic_clear (BOOL *obj, enum memmodel)
6279 EXP is the call expression. */
6281 static rtx
6282 expand_builtin_atomic_clear (tree exp)
6284 machine_mode mode;
6285 rtx mem, ret;
6286 enum memmodel model;
6288 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6289 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6290 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6292 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6294 location_t loc
6295 = expansion_point_location_if_in_system_header (input_location);
6296 warning_at (loc, OPT_Winvalid_memory_model,
6297 "invalid memory model for %<__atomic_store%>");
6298 model = MEMMODEL_SEQ_CST;
6301 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6302 Failing that, a store is issued by __atomic_store. The only way this can
6303 fail is if the bool type is larger than a word size. Unlikely, but
6304 handle it anyway for completeness. Assume a single threaded model since
6305 there is no atomic support in this case, and no barriers are required. */
6306 ret = expand_atomic_store (mem, const0_rtx, model, true);
6307 if (!ret)
6308 emit_move_insn (mem, const0_rtx);
6309 return const0_rtx;
6312 /* Expand an atomic test_and_set operation.
6313 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6314 EXP is the call expression. */
6316 static rtx
6317 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6319 rtx mem;
6320 enum memmodel model;
6321 machine_mode mode;
6323 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6324 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6325 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6327 return expand_atomic_test_and_set (target, mem, model);
6331 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6332 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
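/* Example (illustrative): __atomic_always_lock_free (4, 0) folds to true
   when a compare-and-swap and an atomic load exist for the 32-bit integer
   mode; a constant second argument such as (void *) 2 is a fake pointer
   whose least significant set bit encodes a 2-byte object alignment.  */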
6334 static tree
6335 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6337 int size;
6338 machine_mode mode;
6339 unsigned int mode_align, type_align;
6341 if (TREE_CODE (arg0) != INTEGER_CST)
6342 return NULL_TREE;
6344 /* We need a corresponding integer mode for the access to be lock-free. */
6345 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6346 if (!int_mode_for_size (size, 0).exists (&mode))
6347 return boolean_false_node;
6349 mode_align = GET_MODE_ALIGNMENT (mode);
6351 if (TREE_CODE (arg1) == INTEGER_CST)
6353 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6355 /* Either this argument is null, or it's a fake pointer encoding
6356 the alignment of the object. */
6357 val = least_bit_hwi (val);
6358 val *= BITS_PER_UNIT;
6360 if (val == 0 || mode_align < val)
6361 type_align = mode_align;
6362 else
6363 type_align = val;
6365 else
6367 tree ttype = TREE_TYPE (arg1);
6369 /* This function is usually invoked and folded immediately by the front
6370 end before anything else has a chance to look at it. The pointer
6371 parameter at this point is usually cast to a void *, so check for that
6372 and look past the cast. */
6373 if (CONVERT_EXPR_P (arg1)
6374 && POINTER_TYPE_P (ttype)
6375 && VOID_TYPE_P (TREE_TYPE (ttype))
6376 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6377 arg1 = TREE_OPERAND (arg1, 0);
6379 ttype = TREE_TYPE (arg1);
6380 gcc_assert (POINTER_TYPE_P (ttype));
6382 /* Get the underlying type of the object. */
6383 ttype = TREE_TYPE (ttype);
6384 type_align = TYPE_ALIGN (ttype);
6387 /* If the object has smaller alignment, the lock free routines cannot
6388 be used. */
6389 if (type_align < mode_align)
6390 return boolean_false_node;
6392 /* Check if a compare_and_swap pattern exists for the mode which represents
6393 the required size. The pattern is not allowed to fail, so the existence
6394 of the pattern indicates support is present. Also require that an
6395 atomic load exists for the required size. */
6396 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6397 return boolean_true_node;
6398 else
6399 return boolean_false_node;
6402 /* Return true if the parameters to call EXP represent an object which will
6403 always generate lock free instructions. The first argument represents the
6404 size of the object, and the second parameter is a pointer to the object
6405 itself. If NULL is passed for the object, then the result is based on
6406 typical alignment for an object of the specified size. Otherwise return
6407 false. */
6409 static rtx
6410 expand_builtin_atomic_always_lock_free (tree exp)
6412 tree size;
6413 tree arg0 = CALL_EXPR_ARG (exp, 0);
6414 tree arg1 = CALL_EXPR_ARG (exp, 1);
6416 if (TREE_CODE (arg0) != INTEGER_CST)
6418 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6419 return const0_rtx;
6422 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6423 if (size == boolean_true_node)
6424 return const1_rtx;
6425 return const0_rtx;
6428 /* Return boolean_true_node if it can be determined that object ARG1 of size
6429 ARG0 is always lock free on this architecture, otherwise NULL_TREE. */
6431 static tree
6432 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6434 if (!flag_inline_atomics)
6435 return NULL_TREE;
6437 /* If it isn't always lock free, don't generate a result. */
6438 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6439 return boolean_true_node;
6441 return NULL_TREE;
6444 /* Return true if the parameters to call EXP represent an object which will
6445 always generate lock free instructions. The first argument represents the
6446 size of the object, and the second parameter is a pointer to the object
6447 itself. If NULL is passed for the object, then the result is based on
6448 typical alignment for an object of the specified size. Otherwise return
6449 NULL. */
6451 static rtx
6452 expand_builtin_atomic_is_lock_free (tree exp)
6454 tree size;
6455 tree arg0 = CALL_EXPR_ARG (exp, 0);
6456 tree arg1 = CALL_EXPR_ARG (exp, 1);
6458 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6460 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6461 return NULL_RTX;
6464 if (!flag_inline_atomics)
6465 return NULL_RTX;
6467 /* If the value is known at compile time, return the RTX for it. */
6468 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6469 if (size == boolean_true_node)
6470 return const1_rtx;
6472 return NULL_RTX;
6475 /* Expand the __atomic_thread_fence intrinsic:
6476 void __atomic_thread_fence (enum memmodel)
6477 EXP is the CALL_EXPR. */
6479 static void
6480 expand_builtin_atomic_thread_fence (tree exp)
6482 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6483 expand_mem_thread_fence (model);
6486 /* Expand the __atomic_signal_fence intrinsic:
6487 void __atomic_signal_fence (enum memmodel)
6488 EXP is the CALL_EXPR. */
6490 static void
6491 expand_builtin_atomic_signal_fence (tree exp)
6493 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6494 expand_mem_signal_fence (model);
6497 /* Expand the __sync_synchronize intrinsic. */
6499 static void
6500 expand_builtin_sync_synchronize (void)
6502 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6505 static rtx
6506 expand_builtin_thread_pointer (tree exp, rtx target)
6508 enum insn_code icode;
6509 if (!validate_arglist (exp, VOID_TYPE))
6510 return const0_rtx;
6511 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6512 if (icode != CODE_FOR_nothing)
6514 class expand_operand op;
6515 /* If the target is not suitable then create a new target. */
6516 if (target == NULL_RTX
6517 || !REG_P (target)
6518 || GET_MODE (target) != Pmode)
6519 target = gen_reg_rtx (Pmode);
6520 create_output_operand (&op, target, Pmode);
6521 expand_insn (icode, 1, &op);
6522 return target;
6524 error ("%<__builtin_thread_pointer%> is not supported on this target");
6525 return const0_rtx;
6528 static void
6529 expand_builtin_set_thread_pointer (tree exp)
6531 enum insn_code icode;
6532 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6533 return;
6534 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6535 if (icode != CODE_FOR_nothing)
6537 class expand_operand op;
6538 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6539 Pmode, EXPAND_NORMAL);
6540 create_input_operand (&op, val, Pmode);
6541 expand_insn (icode, 1, &op);
6542 return;
6544 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6548 /* Emit code to restore the current value of stack. */
6550 static void
6551 expand_stack_restore (tree var)
6553 rtx_insn *prev;
6554 rtx sa = expand_normal (var);
6556 sa = convert_memory_address (Pmode, sa);
6558 prev = get_last_insn ();
6559 emit_stack_restore (SAVE_BLOCK, sa);
6561 record_new_stack_level ();
6563 fixup_args_size_notes (prev, get_last_insn (), 0);
6566 /* Emit code to save the current value of stack. */
6568 static rtx
6569 expand_stack_save (void)
6571 rtx ret = NULL_RTX;
6573 emit_stack_save (SAVE_BLOCK, &ret);
6574 return ret;
6577 /* Emit code to get the openacc gang, worker or vector id or size. */
6579 static rtx
6580 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6582 const char *name;
6583 rtx fallback_retval;
6584 rtx_insn *(*gen_fn) (rtx, rtx);
6585 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6587 case BUILT_IN_GOACC_PARLEVEL_ID:
6588 name = "__builtin_goacc_parlevel_id";
6589 fallback_retval = const0_rtx;
6590 gen_fn = targetm.gen_oacc_dim_pos;
6591 break;
6592 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6593 name = "__builtin_goacc_parlevel_size";
6594 fallback_retval = const1_rtx;
6595 gen_fn = targetm.gen_oacc_dim_size;
6596 break;
6597 default:
6598 gcc_unreachable ();
6601 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6603 error ("%qs only supported in OpenACC code", name);
6604 return const0_rtx;
6607 tree arg = CALL_EXPR_ARG (exp, 0);
6608 if (TREE_CODE (arg) != INTEGER_CST)
6610 error ("non-constant argument 0 to %qs", name);
6611 return const0_rtx;
6614 int dim = TREE_INT_CST_LOW (arg);
6615 switch (dim)
6617 case GOMP_DIM_GANG:
6618 case GOMP_DIM_WORKER:
6619 case GOMP_DIM_VECTOR:
6620 break;
6621 default:
6622 error ("illegal argument 0 to %qs", name);
6623 return const0_rtx;
6626 if (ignore)
6627 return target;
6629 if (target == NULL_RTX)
6630 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6632 if (!targetm.have_oacc_dim_size ())
6634 emit_move_insn (target, fallback_retval);
6635 return target;
6638 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6639 emit_insn (gen_fn (reg, GEN_INT (dim)));
6640 if (reg != target)
6641 emit_move_insn (target, reg);
6643 return target;
6646 /* Expand a string compare operation using a sequence of char comparisons
6647 to get rid of the calling overhead, with result going to TARGET if
6648 that's convenient.
6650 VAR_STR is the variable string source;
6651 CONST_STR is the constant string source;
6652 LENGTH is the number of chars to compare;
6653 CONST_STR_N indicates which source string is the constant string;
6654 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6656 The expansion looks like this (assume const_str_n is 2, i.e., arg2 is a constant string):
6658 target = (int) (unsigned char) var_str[0]
6659 - (int) (unsigned char) const_str[0];
6660 if (target != 0)
6661 goto ne_label;
6663 target = (int) (unsigned char) var_str[length - 2]
6664 - (int) (unsigned char) const_str[length - 2];
6665 if (target != 0)
6666 goto ne_label;
6667 target = (int) (unsigned char) var_str[length - 1]
6668 - (int) (unsigned char) const_str[length - 1];
6669 ne_label:
6672 static rtx
6673 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6674 unsigned HOST_WIDE_INT length,
6675 int const_str_n, machine_mode mode)
6677 HOST_WIDE_INT offset = 0;
6678 rtx var_rtx_array
6679 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6680 rtx var_rtx = NULL_RTX;
6681 rtx const_rtx = NULL_RTX;
6682 rtx result = target ? target : gen_reg_rtx (mode);
6683 rtx_code_label *ne_label = gen_label_rtx ();
6684 tree unit_type_node = unsigned_char_type_node;
6685 scalar_int_mode unit_mode
6686 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6688 start_sequence ();
6690 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6692 var_rtx
6693 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6694 const_rtx = c_readstr (const_str + offset, unit_mode);
6695 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6696 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6698 op0 = convert_modes (mode, unit_mode, op0, 1);
6699 op1 = convert_modes (mode, unit_mode, op1, 1);
6700 result = expand_simple_binop (mode, MINUS, op0, op1,
6701 result, 1, OPTAB_WIDEN);
6702 if (i < length - 1)
6703 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6704 mode, true, ne_label);
6705 offset += GET_MODE_SIZE (unit_mode);
6708 emit_label (ne_label);
6709 rtx_insn *insns = get_insns ();
6710 end_sequence ();
6711 emit_insn (insns);
6713 return result;
6716 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
6717 to TARGET if that's convenient.
6718 If the call has not been inlined, return NULL_RTX. */
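/* Rough example: for strncmp (s, "ab", 5) with S non-constant, LEN2 is 3
   (counting the terminating nul), so BOUND becomes 3 and the call is
   inlined as long as 3 does not exceed the
   param_builtin_string_cmp_inline_length threshold checked below.  */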
6720 static rtx
6721 inline_expand_builtin_bytecmp (tree exp, rtx target)
6723 tree fndecl = get_callee_fndecl (exp);
6724 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6725 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6727 /* Do NOT apply this inlining expansion when optimizing for size or
6728 optimization level below 2. */
6729 if (optimize < 2 || optimize_insn_for_size_p ())
6730 return NULL_RTX;
6732 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6733 || fcode == BUILT_IN_STRNCMP
6734 || fcode == BUILT_IN_MEMCMP);
6736 /* On a target where the type of the call (int) has the same or narrower precision
6737 than unsigned char, give up the inlining expansion. */
6738 if (TYPE_PRECISION (unsigned_char_type_node)
6739 >= TYPE_PRECISION (TREE_TYPE (exp)))
6740 return NULL_RTX;
6742 tree arg1 = CALL_EXPR_ARG (exp, 0);
6743 tree arg2 = CALL_EXPR_ARG (exp, 1);
6744 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6746 unsigned HOST_WIDE_INT len1 = 0;
6747 unsigned HOST_WIDE_INT len2 = 0;
6748 unsigned HOST_WIDE_INT len3 = 0;
6750 /* Get the object representation of the initializers of ARG1 and ARG2
6751 as strings, provided they refer to constant objects, with their byte
6752 sizes in LEN1 and LEN2, respectively. */
6753 const char *bytes1 = getbyterep (arg1, &len1);
6754 const char *bytes2 = getbyterep (arg2, &len2);
6756 /* Fail if neither argument refers to an initialized constant. */
6757 if (!bytes1 && !bytes2)
6758 return NULL_RTX;
6760 if (is_ncmp)
6762 /* Fail if the memcmp/strncmp bound is not a constant. */
6763 if (!tree_fits_uhwi_p (len3_tree))
6764 return NULL_RTX;
6766 len3 = tree_to_uhwi (len3_tree);
6768 if (fcode == BUILT_IN_MEMCMP)
6770 /* Fail if the memcmp bound is greater than the size of either
6771 of the two constant objects. */
6772 if ((bytes1 && len1 < len3)
6773 || (bytes2 && len2 < len3))
6774 return NULL_RTX;
6778 if (fcode != BUILT_IN_MEMCMP)
6780 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6781 and LEN2 to the length of the nul-terminated string stored
6782 in each. */
6783 if (bytes1 != NULL)
6784 len1 = strnlen (bytes1, len1) + 1;
6785 if (bytes2 != NULL)
6786 len2 = strnlen (bytes2, len2) + 1;
6789 /* See inline_string_cmp. */
6790 int const_str_n;
6791 if (!len1)
6792 const_str_n = 2;
6793 else if (!len2)
6794 const_str_n = 1;
6795 else if (len2 > len1)
6796 const_str_n = 1;
6797 else
6798 const_str_n = 2;
6800 /* For strncmp only, compute the new bound as the smallest of
6801 the lengths of the two strings (plus 1) and the bound provided
6802 to the function. */
6803 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6804 if (is_ncmp && len3 < bound)
6805 bound = len3;
6807 /* If the bound of the comparison is larger than the threshold,
6808 do nothing. */
6809 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6810 return NULL_RTX;
6812 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6814 /* Now expand the call inline. */
6815 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6816 (const_str_n == 1) ? bytes1 : bytes2, bound,
6817 const_str_n, mode);
6820 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6821 represents the size of the first argument to that call, or VOIDmode
6822 if the argument is a pointer. IGNORE will be true if the result
6823 isn't used. */
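/* Usage sketch (illustrative): after

     i = __builtin_speculation_safe_value (i, 0);

   the value of I is unchanged on the architecturally executed path, but
   targets with mitigation support force the failsafe value (here 0) when
   the path is reached under misspeculation, via
   targetm.speculation_safe_value.  */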
6824 static rtx
6825 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6826 bool ignore)
6828 rtx val, failsafe;
6829 unsigned nargs = call_expr_nargs (exp);
6831 tree arg0 = CALL_EXPR_ARG (exp, 0);
6833 if (mode == VOIDmode)
6835 mode = TYPE_MODE (TREE_TYPE (arg0));
6836 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6839 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6841 /* An optional second argument can be used as a failsafe value on
6842 some machines. If it isn't present, then the failsafe value is
6843 assumed to be 0. */
6844 if (nargs > 1)
6846 tree arg1 = CALL_EXPR_ARG (exp, 1);
6847 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6849 else
6850 failsafe = const0_rtx;
6852 /* If the result isn't used, the behavior is undefined. It would be
6853 nice to emit a warning here, but path splitting means this might
6854 happen with legitimate code. So simply drop the builtin
6855 expansion in that case; we've handled any side-effects above. */
6856 if (ignore)
6857 return const0_rtx;
6859 /* If we don't have a suitable target, create one to hold the result. */
6860 if (target == NULL || GET_MODE (target) != mode)
6861 target = gen_reg_rtx (mode);
6863 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6864 val = convert_modes (mode, VOIDmode, val, false);
6866 return targetm.speculation_safe_value (mode, target, val, failsafe);
6869 /* Expand an expression EXP that calls a built-in function,
6870 with result going to TARGET if that's convenient
6871 (and in mode MODE if that's convenient).
6872 SUBTARGET may be used as the target for computing one of EXP's operands.
6873 IGNORE is nonzero if the value is to be ignored. */
6876 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6877 int ignore)
6879 tree fndecl = get_callee_fndecl (exp);
6880 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6881 int flags;
6883 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6884 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6886 /* When ASan is enabled, we don't want to expand some memory/string
6887 builtins and rely on libsanitizer's hooks. This allows us to avoid
6888 redundant checks and be sure that possible overflow will be detected
6889 by ASan. */
6891 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6892 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6893 return expand_call (exp, target, ignore);
6895 /* When not optimizing, generate calls to library functions for a certain
6896 set of builtins. */
6897 if (!optimize
6898 && !called_as_built_in (fndecl)
6899 && fcode != BUILT_IN_FORK
6900 && fcode != BUILT_IN_EXECL
6901 && fcode != BUILT_IN_EXECV
6902 && fcode != BUILT_IN_EXECLP
6903 && fcode != BUILT_IN_EXECLE
6904 && fcode != BUILT_IN_EXECVP
6905 && fcode != BUILT_IN_EXECVE
6906 && fcode != BUILT_IN_CLEAR_CACHE
6907 && !ALLOCA_FUNCTION_CODE_P (fcode)
6908 && fcode != BUILT_IN_FREE)
6909 return expand_call (exp, target, ignore);
6911 /* The built-in function expanders test for target == const0_rtx
6912 to determine whether the function's result will be ignored. */
6913 if (ignore)
6914 target = const0_rtx;
6916 /* If the result of a pure or const built-in function is ignored, and
6917 none of its arguments are volatile, we can avoid expanding the
6918 built-in call and just evaluate the arguments for side-effects. */
6919 if (target == const0_rtx
6920 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6921 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6923 bool volatilep = false;
6924 tree arg;
6925 call_expr_arg_iterator iter;
6927 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6928 if (TREE_THIS_VOLATILE (arg))
6930 volatilep = true;
6931 break;
6934 if (! volatilep)
6936 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6937 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6938 return const0_rtx;
6942 switch (fcode)
6944 CASE_FLT_FN (BUILT_IN_FABS):
6945 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6946 case BUILT_IN_FABSD32:
6947 case BUILT_IN_FABSD64:
6948 case BUILT_IN_FABSD128:
6949 target = expand_builtin_fabs (exp, target, subtarget);
6950 if (target)
6951 return target;
6952 break;
6954 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6955 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6956 target = expand_builtin_copysign (exp, target, subtarget);
6957 if (target)
6958 return target;
6959 break;
6961 /* Just do a normal library call if we were unable to fold
6962 the values. */
6963 CASE_FLT_FN (BUILT_IN_CABS):
6964 break;
6966 CASE_FLT_FN (BUILT_IN_FMA):
6967 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6968 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6969 if (target)
6970 return target;
6971 break;
6973 CASE_FLT_FN (BUILT_IN_ILOGB):
6974 if (! flag_unsafe_math_optimizations)
6975 break;
6976 gcc_fallthrough ();
6977 CASE_FLT_FN (BUILT_IN_ISINF):
6978 CASE_FLT_FN (BUILT_IN_FINITE):
6979 case BUILT_IN_ISFINITE:
6980 case BUILT_IN_ISNORMAL:
6981 target = expand_builtin_interclass_mathfn (exp, target);
6982 if (target)
6983 return target;
6984 break;
6986 CASE_FLT_FN (BUILT_IN_ICEIL):
6987 CASE_FLT_FN (BUILT_IN_LCEIL):
6988 CASE_FLT_FN (BUILT_IN_LLCEIL):
6989 CASE_FLT_FN (BUILT_IN_LFLOOR):
6990 CASE_FLT_FN (BUILT_IN_IFLOOR):
6991 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6992 target = expand_builtin_int_roundingfn (exp, target);
6993 if (target)
6994 return target;
6995 break;
6997 CASE_FLT_FN (BUILT_IN_IRINT):
6998 CASE_FLT_FN (BUILT_IN_LRINT):
6999 CASE_FLT_FN (BUILT_IN_LLRINT):
7000 CASE_FLT_FN (BUILT_IN_IROUND):
7001 CASE_FLT_FN (BUILT_IN_LROUND):
7002 CASE_FLT_FN (BUILT_IN_LLROUND):
7003 target = expand_builtin_int_roundingfn_2 (exp, target);
7004 if (target)
7005 return target;
7006 break;
7008 CASE_FLT_FN (BUILT_IN_POWI):
7009 target = expand_builtin_powi (exp, target);
7010 if (target)
7011 return target;
7012 break;
7014 CASE_FLT_FN (BUILT_IN_CEXPI):
7015 target = expand_builtin_cexpi (exp, target);
7016 gcc_assert (target);
7017 return target;
7019 CASE_FLT_FN (BUILT_IN_SIN):
7020 CASE_FLT_FN (BUILT_IN_COS):
7021 if (! flag_unsafe_math_optimizations)
7022 break;
7023 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7024 if (target)
7025 return target;
7026 break;
7028 CASE_FLT_FN (BUILT_IN_SINCOS):
7029 if (! flag_unsafe_math_optimizations)
7030 break;
7031 target = expand_builtin_sincos (exp);
7032 if (target)
7033 return target;
7034 break;
7036 case BUILT_IN_APPLY_ARGS:
7037 return expand_builtin_apply_args ();
7039 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7040 FUNCTION with a copy of the parameters described by
7041 ARGUMENTS, and ARGSIZE. It returns a block of memory
7042 allocated on the stack into which is stored all the registers
7043 that might possibly be used for returning the result of a
7044 function. ARGUMENTS is the value returned by
7045 __builtin_apply_args. ARGSIZE is the number of bytes of
7046 arguments that must be copied. ??? How should this value be
7047 computed? We'll also need a safe worst case value for varargs
7048 functions. */
7049 case BUILT_IN_APPLY:
7050 if (!validate_arglist (exp, POINTER_TYPE,
7051 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7052 && !validate_arglist (exp, REFERENCE_TYPE,
7053 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7054 return const0_rtx;
7055 else
7057 rtx ops[3];
7059 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7060 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7061 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7063 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7066 /* __builtin_return (RESULT) causes the function to return the
7067 value described by RESULT. RESULT is address of the block of
7068 memory returned by __builtin_apply. */
7069 case BUILT_IN_RETURN:
7070 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7071 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7072 return const0_rtx;
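/* A hedged usage sketch (not part of the expander): the three builtins
   handled above (__builtin_apply_args, __builtin_apply and
   __builtin_return) are normally used together to forward a call.  The
   64 below is a hypothetical worst-case ARGSIZE, since, as noted above,
   there is no general way to compute a safe value:

       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);

   target_fn is a placeholder for the function being forwarded to.  */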
7074 case BUILT_IN_SAVEREGS:
7075 return expand_builtin_saveregs ();
7077 case BUILT_IN_VA_ARG_PACK:
7078 /* All valid uses of __builtin_va_arg_pack () are removed during
7079 inlining. */
7080 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7081 return const0_rtx;
7083 case BUILT_IN_VA_ARG_PACK_LEN:
7084 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7085 inlining. */
7086 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7087 return const0_rtx;
7089 /* Return the address of the first anonymous stack arg. */
7090 case BUILT_IN_NEXT_ARG:
7091 if (fold_builtin_next_arg (exp, false))
7092 return const0_rtx;
7093 return expand_builtin_next_arg ();
7095 case BUILT_IN_CLEAR_CACHE:
7096 expand_builtin___clear_cache (exp);
7097 return const0_rtx;
7099 case BUILT_IN_CLASSIFY_TYPE:
7100 return expand_builtin_classify_type (exp);
7102 case BUILT_IN_CONSTANT_P:
7103 return const0_rtx;
7105 case BUILT_IN_FRAME_ADDRESS:
7106 case BUILT_IN_RETURN_ADDRESS:
7107 return expand_builtin_frame_address (fndecl, exp);
7109 /* Return the address of the area where the structure is returned,
7110 or 0 otherwise. */
7111 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7112 if (call_expr_nargs (exp) != 0
7113 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7114 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7115 return const0_rtx;
7116 else
7117 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7119 CASE_BUILT_IN_ALLOCA:
7120 target = expand_builtin_alloca (exp);
7121 if (target)
7122 return target;
7123 break;
7125 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7126 return expand_asan_emit_allocas_unpoison (exp);
7128 case BUILT_IN_STACK_SAVE:
7129 return expand_stack_save ();
7131 case BUILT_IN_STACK_RESTORE:
7132 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7133 return const0_rtx;
7135 case BUILT_IN_BSWAP16:
7136 case BUILT_IN_BSWAP32:
7137 case BUILT_IN_BSWAP64:
7138 case BUILT_IN_BSWAP128:
7139 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7140 if (target)
7141 return target;
7142 break;
7144 CASE_INT_FN (BUILT_IN_FFS):
7145 target = expand_builtin_unop (target_mode, exp, target,
7146 subtarget, ffs_optab);
7147 if (target)
7148 return target;
7149 break;
7151 CASE_INT_FN (BUILT_IN_CLZ):
7152 target = expand_builtin_unop (target_mode, exp, target,
7153 subtarget, clz_optab);
7154 if (target)
7155 return target;
7156 break;
7158 CASE_INT_FN (BUILT_IN_CTZ):
7159 target = expand_builtin_unop (target_mode, exp, target,
7160 subtarget, ctz_optab);
7161 if (target)
7162 return target;
7163 break;
7165 CASE_INT_FN (BUILT_IN_CLRSB):
7166 target = expand_builtin_unop (target_mode, exp, target,
7167 subtarget, clrsb_optab);
7168 if (target)
7169 return target;
7170 break;
7172 CASE_INT_FN (BUILT_IN_POPCOUNT):
7173 target = expand_builtin_unop (target_mode, exp, target,
7174 subtarget, popcount_optab);
7175 if (target)
7176 return target;
7177 break;
7179 CASE_INT_FN (BUILT_IN_PARITY):
7180 target = expand_builtin_unop (target_mode, exp, target,
7181 subtarget, parity_optab);
7182 if (target)
7183 return target;
7184 break;
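/* For reference, the documented semantics of the bit-counting builtins
   expanded above (with int arguments): __builtin_ffs (0) is 0 and
   otherwise one plus the index of the least significant set bit;
   __builtin_popcount (0xf0) is 4; __builtin_parity (x) equals
   __builtin_popcount (x) & 1; __builtin_clz and __builtin_ctz are
   undefined for a zero argument.  When the target provides no suitable
   optab, the break above lets the call fall through to the ordinary
   library call emitted after this switch.  */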
7186 case BUILT_IN_STRLEN:
7187 target = expand_builtin_strlen (exp, target, target_mode);
7188 if (target)
7189 return target;
7190 break;
7192 case BUILT_IN_STRNLEN:
7193 target = expand_builtin_strnlen (exp, target, target_mode);
7194 if (target)
7195 return target;
7196 break;
7198 case BUILT_IN_STRCPY:
7199 target = expand_builtin_strcpy (exp, target);
7200 if (target)
7201 return target;
7202 break;
7204 case BUILT_IN_STRNCPY:
7205 target = expand_builtin_strncpy (exp, target);
7206 if (target)
7207 return target;
7208 break;
7210 case BUILT_IN_STPCPY:
7211 target = expand_builtin_stpcpy (exp, target, mode);
7212 if (target)
7213 return target;
7214 break;
7216 case BUILT_IN_MEMCPY:
7217 target = expand_builtin_memcpy (exp, target);
7218 if (target)
7219 return target;
7220 break;
7222 case BUILT_IN_MEMMOVE:
7223 target = expand_builtin_memmove (exp, target);
7224 if (target)
7225 return target;
7226 break;
7228 case BUILT_IN_MEMPCPY:
7229 target = expand_builtin_mempcpy (exp, target);
7230 if (target)
7231 return target;
7232 break;
7234 case BUILT_IN_MEMSET:
7235 target = expand_builtin_memset (exp, target, mode);
7236 if (target)
7237 return target;
7238 break;
7240 case BUILT_IN_BZERO:
7241 target = expand_builtin_bzero (exp);
7242 if (target)
7243 return target;
7244 break;
7246 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7247 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7248 when changing it to a strcmp call. */
7249 case BUILT_IN_STRCMP_EQ:
7250 target = expand_builtin_memcmp (exp, target, true);
7251 if (target)
7252 return target;
7254 /* Change this call back to a BUILT_IN_STRCMP. */
7255 TREE_OPERAND (exp, 1)
7256 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7258 /* Delete the last parameter. */
7259 unsigned int i;
7260 vec<tree, va_gc> *arg_vec;
7261 vec_alloc (arg_vec, 2);
7262 for (i = 0; i < 2; i++)
7263 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7264 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7265 /* FALLTHROUGH */
7267 case BUILT_IN_STRCMP:
7268 target = expand_builtin_strcmp (exp, target);
7269 if (target)
7270 return target;
7271 break;
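/* A rough sketch of the rewrite above: BUILT_IN_STRCMP_EQ is a strcmp
   whose result is only tested for equality and which carries an extra
   length argument, so it can first be expanded like memcmp.  If that
   fails, the call is conceptually rebuilt as

       __builtin_strcmp_eq (a, b, len)  ==>  strcmp (a, b)

   by dropping the third argument, and control falls through to the
   BUILT_IN_STRCMP case.  */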
7273 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7274 back to a BUILT_IN_STRNCMP. */
7275 case BUILT_IN_STRNCMP_EQ:
7276 target = expand_builtin_memcmp (exp, target, true);
7277 if (target)
7278 return target;
7280 /* Change it back to a BUILT_IN_STRNCMP. */
7281 TREE_OPERAND (exp, 1)
7282 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7283 /* FALLTHROUGH */
7285 case BUILT_IN_STRNCMP:
7286 target = expand_builtin_strncmp (exp, target, mode);
7287 if (target)
7288 return target;
7289 break;
7291 case BUILT_IN_BCMP:
7292 case BUILT_IN_MEMCMP:
7293 case BUILT_IN_MEMCMP_EQ:
7294 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7295 if (target)
7296 return target;
7297 if (fcode == BUILT_IN_MEMCMP_EQ)
7299 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7300 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7302 break;
7304 case BUILT_IN_SETJMP:
7305 /* This should have been lowered to the builtins below. */
7306 gcc_unreachable ();
7308 case BUILT_IN_SETJMP_SETUP:
7309 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7310 and the receiver label. */
7311 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7313 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7314 VOIDmode, EXPAND_NORMAL);
7315 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7316 rtx_insn *label_r = label_rtx (label);
7318 /* This is copied from the handling of non-local gotos. */
7319 expand_builtin_setjmp_setup (buf_addr, label_r);
7320 nonlocal_goto_handler_labels
7321 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7322 nonlocal_goto_handler_labels);
7323 /* ??? Do not let expand_label treat us as such since we would
7324 not want to be both on the list of non-local labels and on
7325 the list of forced labels. */
7326 FORCED_LABEL (label) = 0;
7327 return const0_rtx;
7329 break;
7331 case BUILT_IN_SETJMP_RECEIVER:
7332 /* __builtin_setjmp_receiver is passed the receiver label. */
7333 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7335 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7336 rtx_insn *label_r = label_rtx (label);
7338 expand_builtin_setjmp_receiver (label_r);
7339 return const0_rtx;
7341 break;
7343 /* __builtin_longjmp is passed a pointer to an array of five words.
7344 It's similar to the C library longjmp function but works with
7345 __builtin_setjmp above. */
7346 case BUILT_IN_LONGJMP:
7347 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7349 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7350 VOIDmode, EXPAND_NORMAL);
7351 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7353 if (value != const1_rtx)
7355 error ("%<__builtin_longjmp%> second argument must be 1");
7356 return const0_rtx;
7359 expand_builtin_longjmp (buf_addr, value);
7360 return const0_rtx;
7362 break;
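/* Illustrative use of the builtin setjmp/longjmp pair expanded above
   (a sketch, not the expander's own code).  The buffer is an array of
   five words and the second argument of __builtin_longjmp must be 1,
   as enforced above:

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
	 do_work ();
       else
	 handle_unwind ();
       ...
       __builtin_longjmp (buf, 1);

   do_work and handle_unwind are hypothetical placeholders, and the
   longjmp must happen while the frame that did the setjmp is still
   live.  */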
7364 case BUILT_IN_NONLOCAL_GOTO:
7365 target = expand_builtin_nonlocal_goto (exp);
7366 if (target)
7367 return target;
7368 break;
7370 /* This updates the setjmp buffer that is its argument with the value
7371 of the current stack pointer. */
7372 case BUILT_IN_UPDATE_SETJMP_BUF:
7373 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7375 rtx buf_addr
7376 = expand_normal (CALL_EXPR_ARG (exp, 0));
7378 expand_builtin_update_setjmp_buf (buf_addr);
7379 return const0_rtx;
7381 break;
7383 case BUILT_IN_TRAP:
7384 expand_builtin_trap ();
7385 return const0_rtx;
7387 case BUILT_IN_UNREACHABLE:
7388 expand_builtin_unreachable ();
7389 return const0_rtx;
7391 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7392 case BUILT_IN_SIGNBITD32:
7393 case BUILT_IN_SIGNBITD64:
7394 case BUILT_IN_SIGNBITD128:
7395 target = expand_builtin_signbit (exp, target);
7396 if (target)
7397 return target;
7398 break;
7400 /* Various hooks for the DWARF 2 __throw routine. */
7401 case BUILT_IN_UNWIND_INIT:
7402 expand_builtin_unwind_init ();
7403 return const0_rtx;
7404 case BUILT_IN_DWARF_CFA:
7405 return virtual_cfa_rtx;
7406 #ifdef DWARF2_UNWIND_INFO
7407 case BUILT_IN_DWARF_SP_COLUMN:
7408 return expand_builtin_dwarf_sp_column ();
7409 case BUILT_IN_INIT_DWARF_REG_SIZES:
7410 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7411 return const0_rtx;
7412 #endif
7413 case BUILT_IN_FROB_RETURN_ADDR:
7414 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7415 case BUILT_IN_EXTRACT_RETURN_ADDR:
7416 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7417 case BUILT_IN_EH_RETURN:
7418 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7419 CALL_EXPR_ARG (exp, 1));
7420 return const0_rtx;
7421 case BUILT_IN_EH_RETURN_DATA_REGNO:
7422 return expand_builtin_eh_return_data_regno (exp);
7423 case BUILT_IN_EXTEND_POINTER:
7424 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7425 case BUILT_IN_EH_POINTER:
7426 return expand_builtin_eh_pointer (exp);
7427 case BUILT_IN_EH_FILTER:
7428 return expand_builtin_eh_filter (exp);
7429 case BUILT_IN_EH_COPY_VALUES:
7430 return expand_builtin_eh_copy_values (exp);
7432 case BUILT_IN_VA_START:
7433 return expand_builtin_va_start (exp);
7434 case BUILT_IN_VA_END:
7435 return expand_builtin_va_end (exp);
7436 case BUILT_IN_VA_COPY:
7437 return expand_builtin_va_copy (exp);
7438 case BUILT_IN_EXPECT:
7439 return expand_builtin_expect (exp, target);
7440 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7441 return expand_builtin_expect_with_probability (exp, target);
7442 case BUILT_IN_ASSUME_ALIGNED:
7443 return expand_builtin_assume_aligned (exp, target);
7444 case BUILT_IN_PREFETCH:
7445 expand_builtin_prefetch (exp);
7446 return const0_rtx;
7448 case BUILT_IN_INIT_TRAMPOLINE:
7449 return expand_builtin_init_trampoline (exp, true);
7450 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7451 return expand_builtin_init_trampoline (exp, false);
7452 case BUILT_IN_ADJUST_TRAMPOLINE:
7453 return expand_builtin_adjust_trampoline (exp);
7455 case BUILT_IN_INIT_DESCRIPTOR:
7456 return expand_builtin_init_descriptor (exp);
7457 case BUILT_IN_ADJUST_DESCRIPTOR:
7458 return expand_builtin_adjust_descriptor (exp);
7460 case BUILT_IN_FORK:
7461 case BUILT_IN_EXECL:
7462 case BUILT_IN_EXECV:
7463 case BUILT_IN_EXECLP:
7464 case BUILT_IN_EXECLE:
7465 case BUILT_IN_EXECVP:
7466 case BUILT_IN_EXECVE:
7467 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7468 if (target)
7469 return target;
7470 break;
7472 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7473 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7474 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7475 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7476 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7478 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7479 if (target)
7480 return target;
7481 break;
7483 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7484 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7485 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7486 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7487 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7489 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7490 if (target)
7491 return target;
7492 break;
7494 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7495 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7496 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7497 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7498 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7500 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7501 if (target)
7502 return target;
7503 break;
7505 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7506 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7507 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7508 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7509 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7511 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7512 if (target)
7513 return target;
7514 break;
7516 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7517 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7518 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7519 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7520 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7522 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7523 if (target)
7524 return target;
7525 break;
7527 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7528 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7529 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7530 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7531 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7533 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7534 if (target)
7535 return target;
7536 break;
7538 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7539 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7540 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7541 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7542 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7544 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7545 if (target)
7546 return target;
7547 break;
7549 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7550 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7551 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7552 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7553 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7555 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7556 if (target)
7557 return target;
7558 break;
7560 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7561 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7562 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7563 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7564 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7566 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7567 if (target)
7568 return target;
7569 break;
7571 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7572 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7573 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7574 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7575 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7577 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7578 if (target)
7579 return target;
7580 break;
7582 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7583 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7584 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7585 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7586 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7587 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7588 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7589 if (target)
7590 return target;
7591 break;
7593 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7594 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7595 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7596 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7597 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7599 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7600 if (target)
7601 return target;
7602 break;
7604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7608 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7609 if (mode == VOIDmode)
7610 mode = TYPE_MODE (boolean_type_node);
7611 if (!target || !register_operand (target, mode))
7612 target = gen_reg_rtx (mode);
7614 mode = get_builtin_sync_mode
7615 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7616 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7617 if (target)
7618 return target;
7619 break;
7621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7625 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7626 mode = get_builtin_sync_mode
7627 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7628 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7629 if (target)
7630 return target;
7631 break;
7633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7635 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7637 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7638 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7639 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7640 if (target)
7641 return target;
7642 break;
7644 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7645 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7646 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7647 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7648 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7649 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7650 expand_builtin_sync_lock_release (mode, exp);
7651 return const0_rtx;
7653 case BUILT_IN_SYNC_SYNCHRONIZE:
7654 expand_builtin_sync_synchronize ();
7655 return const0_rtx;
7657 case BUILT_IN_ATOMIC_EXCHANGE_1:
7658 case BUILT_IN_ATOMIC_EXCHANGE_2:
7659 case BUILT_IN_ATOMIC_EXCHANGE_4:
7660 case BUILT_IN_ATOMIC_EXCHANGE_8:
7661 case BUILT_IN_ATOMIC_EXCHANGE_16:
7662 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7663 target = expand_builtin_atomic_exchange (mode, exp, target);
7664 if (target)
7665 return target;
7666 break;
7668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7670 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7672 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7674 unsigned int nargs, z;
7675 vec<tree, va_gc> *vec;
7677 mode =
7678 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7679 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7680 if (target)
7681 return target;
7683 /* If this is turned into an external library call, the weak parameter
7684 must be dropped to match the expected parameter list. */
7685 nargs = call_expr_nargs (exp);
7686 vec_alloc (vec, nargs - 1);
7687 for (z = 0; z < 3; z++)
7688 vec->quick_push (CALL_EXPR_ARG (exp, z));
7689 /* Skip the boolean weak parameter. */
7690 for (z = 4; z < 6; z++)
7691 vec->quick_push (CALL_EXPR_ARG (exp, z));
7692 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7693 break;
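/* A sketch of why the weak argument is dropped above: the builtin form

       __atomic_compare_exchange_N (ptr, expected, desired, weak,
				    success_order, failure_order)

   takes six arguments, whereas the out-of-line library routine of the
   same name takes five, with no WEAK parameter.  Rebuilding the call
   from arguments 0-2 and 4-5 makes the fallback call match that
   expected prototype.  */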
7696 case BUILT_IN_ATOMIC_LOAD_1:
7697 case BUILT_IN_ATOMIC_LOAD_2:
7698 case BUILT_IN_ATOMIC_LOAD_4:
7699 case BUILT_IN_ATOMIC_LOAD_8:
7700 case BUILT_IN_ATOMIC_LOAD_16:
7701 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7702 target = expand_builtin_atomic_load (mode, exp, target);
7703 if (target)
7704 return target;
7705 break;
7707 case BUILT_IN_ATOMIC_STORE_1:
7708 case BUILT_IN_ATOMIC_STORE_2:
7709 case BUILT_IN_ATOMIC_STORE_4:
7710 case BUILT_IN_ATOMIC_STORE_8:
7711 case BUILT_IN_ATOMIC_STORE_16:
7712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7713 target = expand_builtin_atomic_store (mode, exp);
7714 if (target)
7715 return const0_rtx;
7716 break;
7718 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7719 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7720 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7721 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7722 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7724 enum built_in_function lib;
7725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7726 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7727 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7728 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7729 ignore, lib);
7730 if (target)
7731 return target;
7732 break;
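/* The LIB fallback above relies on the relation between the two forms,
   roughly (ignoring memory-order details):

       __atomic_add_fetch (p, v, m)  ==  __atomic_fetch_add (p, v, m) + v

   expand_builtin_atomic_fetch_op is told (by the TRUE argument) that the
   value after the operation is wanted, so if it has to fall back to the
   __atomic_fetch_add_N library routine it applies the operation once
   more to the returned value.  The SUB/AND/NAND/XOR/OR cases below
   follow the same pattern.  */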
7734 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7735 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7736 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7737 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7738 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7740 enum built_in_function lib;
7741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7742 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7743 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7744 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7745 ignore, lib);
7746 if (target)
7747 return target;
7748 break;
7750 case BUILT_IN_ATOMIC_AND_FETCH_1:
7751 case BUILT_IN_ATOMIC_AND_FETCH_2:
7752 case BUILT_IN_ATOMIC_AND_FETCH_4:
7753 case BUILT_IN_ATOMIC_AND_FETCH_8:
7754 case BUILT_IN_ATOMIC_AND_FETCH_16:
7756 enum built_in_function lib;
7757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7758 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7759 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7760 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7761 ignore, lib);
7762 if (target)
7763 return target;
7764 break;
7766 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7767 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7768 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7769 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7770 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7772 enum built_in_function lib;
7773 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7774 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7775 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7776 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7777 ignore, lib);
7778 if (target)
7779 return target;
7780 break;
7782 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7783 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7784 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7785 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7786 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7788 enum built_in_function lib;
7789 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7790 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7791 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7792 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7793 ignore, lib);
7794 if (target)
7795 return target;
7796 break;
7798 case BUILT_IN_ATOMIC_OR_FETCH_1:
7799 case BUILT_IN_ATOMIC_OR_FETCH_2:
7800 case BUILT_IN_ATOMIC_OR_FETCH_4:
7801 case BUILT_IN_ATOMIC_OR_FETCH_8:
7802 case BUILT_IN_ATOMIC_OR_FETCH_16:
7804 enum built_in_function lib;
7805 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7806 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7807 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7808 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7809 ignore, lib);
7810 if (target)
7811 return target;
7812 break;
7814 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7815 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7816 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7817 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7818 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7819 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7820 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7821 ignore, BUILT_IN_NONE);
7822 if (target)
7823 return target;
7824 break;
7826 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7827 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7828 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7829 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7830 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7831 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7832 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7833 ignore, BUILT_IN_NONE);
7834 if (target)
7835 return target;
7836 break;
7838 case BUILT_IN_ATOMIC_FETCH_AND_1:
7839 case BUILT_IN_ATOMIC_FETCH_AND_2:
7840 case BUILT_IN_ATOMIC_FETCH_AND_4:
7841 case BUILT_IN_ATOMIC_FETCH_AND_8:
7842 case BUILT_IN_ATOMIC_FETCH_AND_16:
7843 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7844 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7845 ignore, BUILT_IN_NONE);
7846 if (target)
7847 return target;
7848 break;
7850 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7851 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7852 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7853 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7854 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7855 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7856 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7857 ignore, BUILT_IN_NONE);
7858 if (target)
7859 return target;
7860 break;
7862 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7863 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7864 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7865 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7866 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7867 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7868 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7869 ignore, BUILT_IN_NONE);
7870 if (target)
7871 return target;
7872 break;
7874 case BUILT_IN_ATOMIC_FETCH_OR_1:
7875 case BUILT_IN_ATOMIC_FETCH_OR_2:
7876 case BUILT_IN_ATOMIC_FETCH_OR_4:
7877 case BUILT_IN_ATOMIC_FETCH_OR_8:
7878 case BUILT_IN_ATOMIC_FETCH_OR_16:
7879 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7880 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7881 ignore, BUILT_IN_NONE);
7882 if (target)
7883 return target;
7884 break;
7886 case BUILT_IN_ATOMIC_TEST_AND_SET:
7887 return expand_builtin_atomic_test_and_set (exp, target);
7889 case BUILT_IN_ATOMIC_CLEAR:
7890 return expand_builtin_atomic_clear (exp);
7892 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7893 return expand_builtin_atomic_always_lock_free (exp);
7895 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7896 target = expand_builtin_atomic_is_lock_free (exp);
7897 if (target)
7898 return target;
7899 break;
7901 case BUILT_IN_ATOMIC_THREAD_FENCE:
7902 expand_builtin_atomic_thread_fence (exp);
7903 return const0_rtx;
7905 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7906 expand_builtin_atomic_signal_fence (exp);
7907 return const0_rtx;
7909 case BUILT_IN_OBJECT_SIZE:
7910 return expand_builtin_object_size (exp);
7912 case BUILT_IN_MEMCPY_CHK:
7913 case BUILT_IN_MEMPCPY_CHK:
7914 case BUILT_IN_MEMMOVE_CHK:
7915 case BUILT_IN_MEMSET_CHK:
7916 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7917 if (target)
7918 return target;
7919 break;
7921 case BUILT_IN_STRCPY_CHK:
7922 case BUILT_IN_STPCPY_CHK:
7923 case BUILT_IN_STRNCPY_CHK:
7924 case BUILT_IN_STPNCPY_CHK:
7925 case BUILT_IN_STRCAT_CHK:
7926 case BUILT_IN_STRNCAT_CHK:
7927 case BUILT_IN_SNPRINTF_CHK:
7928 case BUILT_IN_VSNPRINTF_CHK:
7929 maybe_emit_chk_warning (exp, fcode);
7930 break;
7932 case BUILT_IN_SPRINTF_CHK:
7933 case BUILT_IN_VSPRINTF_CHK:
7934 maybe_emit_sprintf_chk_warning (exp, fcode);
7935 break;
7937 case BUILT_IN_THREAD_POINTER:
7938 return expand_builtin_thread_pointer (exp, target);
7940 case BUILT_IN_SET_THREAD_POINTER:
7941 expand_builtin_set_thread_pointer (exp);
7942 return const0_rtx;
7944 case BUILT_IN_ACC_ON_DEVICE:
7945 /* Do a library call if we failed to expand the builtin when
7946 folding. */
7947 break;
7949 case BUILT_IN_GOACC_PARLEVEL_ID:
7950 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7951 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7953 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
7954 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
7956 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
7957 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
7958 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
7959 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
7960 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
7961 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
7962 return expand_speculation_safe_value (mode, exp, target, ignore);
7964 default: /* just do a library call if this is an unknown builtin */
7965 break;
7968 /* The switch statement above can drop through to cause the function
7969 to be called normally. */
7970 return expand_call (exp, target, ignore);
7973 /* Determine whether a tree node represents a call to a built-in
7974 function. If the tree T is a call to a built-in function with
7975 the right number of arguments of the appropriate types, return
7976 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7977 Otherwise the return value is END_BUILTINS. */
7979 enum built_in_function
7980 builtin_mathfn_code (const_tree t)
7982 const_tree fndecl, arg, parmlist;
7983 const_tree argtype, parmtype;
7984 const_call_expr_arg_iterator iter;
7986 if (TREE_CODE (t) != CALL_EXPR)
7987 return END_BUILTINS;
7989 fndecl = get_callee_fndecl (t);
7990 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7991 return END_BUILTINS;
7993 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7994 init_const_call_expr_arg_iterator (t, &iter);
7995 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7997 /* If a function doesn't take a variable number of arguments,
7998 the last element in the list will have type `void'. */
7999 parmtype = TREE_VALUE (parmlist);
8000 if (VOID_TYPE_P (parmtype))
8002 if (more_const_call_expr_args_p (&iter))
8003 return END_BUILTINS;
8004 return DECL_FUNCTION_CODE (fndecl);
8007 if (! more_const_call_expr_args_p (&iter))
8008 return END_BUILTINS;
8010 arg = next_const_call_expr_arg (&iter);
8011 argtype = TREE_TYPE (arg);
8013 if (SCALAR_FLOAT_TYPE_P (parmtype))
8015 if (! SCALAR_FLOAT_TYPE_P (argtype))
8016 return END_BUILTINS;
8018 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8020 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8021 return END_BUILTINS;
8023 else if (POINTER_TYPE_P (parmtype))
8025 if (! POINTER_TYPE_P (argtype))
8026 return END_BUILTINS;
8028 else if (INTEGRAL_TYPE_P (parmtype))
8030 if (! INTEGRAL_TYPE_P (argtype))
8031 return END_BUILTINS;
8033 else
8034 return END_BUILTINS;
8037 /* Variable-length argument list. */
8038 return DECL_FUNCTION_CODE (fndecl);
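/* For example, for a call tree representing sqrt (x) where X has a
   scalar float type matching sqrt's parameter, the walk above returns
   BUILT_IN_SQRT; if the argument's type class did not match (say, a
   pointer passed where a float is expected), it would return
   END_BUILTINS instead.  */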
8041 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8042 evaluate to a constant. */
8044 static tree
8045 fold_builtin_constant_p (tree arg)
8047 /* We return 1 for a numeric type that's known to be a constant
8048 value at compile-time or for an aggregate type that's a
8049 literal constant. */
8050 STRIP_NOPS (arg);
8052 /* If we know this is a constant, return the constant one. */
8053 if (CONSTANT_CLASS_P (arg)
8054 || (TREE_CODE (arg) == CONSTRUCTOR
8055 && TREE_CONSTANT (arg)))
8056 return integer_one_node;
8057 if (TREE_CODE (arg) == ADDR_EXPR)
8059 tree op = TREE_OPERAND (arg, 0);
8060 if (TREE_CODE (op) == STRING_CST
8061 || (TREE_CODE (op) == ARRAY_REF
8062 && integer_zerop (TREE_OPERAND (op, 1))
8063 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8064 return integer_one_node;
8067 /* If this expression has side effects, show we don't know it to be a
8068 constant. Likewise if it's a pointer or aggregate type, since in
8069 those cases we only want literals, as those are only optimized
8070 when generating RTL, not later.
8071 And finally, if we are compiling an initializer, not code, we
8072 need to return a definite result now; there's not going to be any
8073 more optimization done. */
8074 if (TREE_SIDE_EFFECTS (arg)
8075 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8076 || POINTER_TYPE_P (TREE_TYPE (arg))
8077 || cfun == 0
8078 || folding_initializer
8079 || force_folding_builtin_constant_p)
8080 return integer_zero_node;
8082 return NULL_TREE;
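/* Some concrete outcomes of the fold above, as a summary rather than
   new rules:

       __builtin_constant_p (42)     -> 1 (CONSTANT_CLASS_P)
       __builtin_constant_p ("abc")  -> 1 (ADDR_EXPR of a STRING_CST)
       __builtin_constant_p (f ())   -> 0 (the call has side effects)
       __builtin_constant_p (x)      -> NULL_TREE for an ordinary variable,
					i.e. the answer is deferred to later
					passes, unless we are folding an
					initializer, in which case it is 0.  */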
8085 /* Create builtin_expect or builtin_expect_with_probability
8086 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8087 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8088 argument; builtin_expect_with_probability instead uses the third argument as
8089 the PROBABILITY value. */
8091 static tree
8092 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8093 tree predictor, tree probability)
8095 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8097 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8098 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8099 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8100 ret_type = TREE_TYPE (TREE_TYPE (fn));
8101 pred_type = TREE_VALUE (arg_types);
8102 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8104 pred = fold_convert_loc (loc, pred_type, pred);
8105 expected = fold_convert_loc (loc, expected_type, expected);
8107 if (probability)
8108 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8109 else
8110 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8111 predictor);
8113 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8114 build_int_cst (ret_type, 0));
8117 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8118 NULL_TREE if no simplification is possible. */
8120 tree
8121 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8122 tree arg3)
8124 tree inner, fndecl, inner_arg0;
8125 enum tree_code code;
8127 /* Distribute the expected value over short-circuiting operators.
8128 See through the cast from truthvalue_type_node to long. */
8129 inner_arg0 = arg0;
8130 while (CONVERT_EXPR_P (inner_arg0)
8131 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8132 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8133 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8135 /* If this is a builtin_expect within a builtin_expect keep the
8136 inner one. See through a comparison against a constant. It
8137 might have been added to create a truthvalue. */
8138 inner = inner_arg0;
8140 if (COMPARISON_CLASS_P (inner)
8141 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8142 inner = TREE_OPERAND (inner, 0);
8144 if (TREE_CODE (inner) == CALL_EXPR
8145 && (fndecl = get_callee_fndecl (inner))
8146 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8147 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8148 return arg0;
8150 inner = inner_arg0;
8151 code = TREE_CODE (inner);
8152 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8154 tree op0 = TREE_OPERAND (inner, 0);
8155 tree op1 = TREE_OPERAND (inner, 1);
8156 arg1 = save_expr (arg1);
8158 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8159 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8160 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8162 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8165 /* If the argument isn't invariant then there's nothing else we can do. */
8166 if (!TREE_CONSTANT (inner_arg0))
8167 return NULL_TREE;
8169 /* If we expect that a comparison against the argument will fold to
8170 a constant, return the constant. In practice, this means a true
8171 constant or the address of a non-weak symbol. */
8172 inner = inner_arg0;
8173 STRIP_NOPS (inner);
8174 if (TREE_CODE (inner) == ADDR_EXPR)
     {
       do
8178 inner = TREE_OPERAND (inner, 0);
8180 while (TREE_CODE (inner) == COMPONENT_REF
8181 || TREE_CODE (inner) == ARRAY_REF);
8182 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8183 return NULL_TREE;
     }
8186 /* Otherwise, ARG0 already has the proper type for the return value. */
8187 return arg0;
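/* Roughly, the short-circuit distribution above turns

       __builtin_expect (a && b, 1)

   into

       __builtin_expect (a, 1) && __builtin_expect (b, 1)

   with each operand wrapped by build_builtin_expect_predicate, i.e. as
   a comparison of the expect call against zero, so the hint survives
   when the TRUTH_ANDIF_EXPR is later broken up.  */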
8190 /* Fold a call to __builtin_classify_type with argument ARG. */
8192 static tree
8193 fold_builtin_classify_type (tree arg)
8195 if (arg == 0)
8196 return build_int_cst (integer_type_node, no_type_class);
8198 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8201 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8202 ARG. */
8204 static tree
8205 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8207 if (!validate_arg (arg, POINTER_TYPE))
8208 return NULL_TREE;
8209 else
8211 c_strlen_data lendata = { };
8212 tree len = c_strlen (arg, 0, &lendata);
8214 if (len)
8215 return fold_convert_loc (loc, type, len);
8217 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8218 also early enough to detect invalid reads in multidimensional
8219 arrays and struct members. */
8220 if (!lendata.decl)
8221 c_strlen (arg, 1, &lendata);
8223 if (lendata.decl)
8225 if (EXPR_HAS_LOCATION (arg))
8226 loc = EXPR_LOCATION (arg);
8227 else if (loc == UNKNOWN_LOCATION)
8228 loc = input_location;
8229 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8232 return NULL_TREE;
8236 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8238 static tree
8239 fold_builtin_inf (location_t loc, tree type, int warn)
8241 REAL_VALUE_TYPE real;
8243 /* __builtin_inff is intended to be usable to define INFINITY on all
8244 targets. If an infinity is not available, INFINITY expands "to a
8245 positive constant of type float that overflows at translation
8246 time", footnote "In this case, using INFINITY will violate the
8247 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8248 Thus we pedwarn to ensure this constraint violation is
8249 diagnosed. */
8250 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8251 pedwarn (loc, 0, "target format does not support infinity");
8253 real_inf (&real);
8254 return build_real (type, real);
8257 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8258 NULL_TREE if no simplification can be made. */
8260 static tree
8261 fold_builtin_sincos (location_t loc,
8262 tree arg0, tree arg1, tree arg2)
8264 tree type;
8265 tree fndecl, call = NULL_TREE;
8267 if (!validate_arg (arg0, REAL_TYPE)
8268 || !validate_arg (arg1, POINTER_TYPE)
8269 || !validate_arg (arg2, POINTER_TYPE))
8270 return NULL_TREE;
8272 type = TREE_TYPE (arg0);
8274 /* Calculate the result when the argument is a constant. */
8275 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8276 if (fn == END_BUILTINS)
8277 return NULL_TREE;
8279 /* Canonicalize sincos to cexpi. */
8280 if (TREE_CODE (arg0) == REAL_CST)
8282 tree complex_type = build_complex_type (type);
8283 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8285 if (!call)
8287 if (!targetm.libc_has_function (function_c99_math_complex, type)
8288 || !builtin_decl_implicit_p (fn))
8289 return NULL_TREE;
8290 fndecl = builtin_decl_explicit (fn);
8291 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8292 call = builtin_save_expr (call);
8295 tree ptype = build_pointer_type (type);
8296 arg1 = fold_convert (ptype, arg1);
8297 arg2 = fold_convert (ptype, arg2);
8298 return build2 (COMPOUND_EXPR, void_type_node,
8299 build2 (MODIFY_EXPR, void_type_node,
8300 build_fold_indirect_ref_loc (loc, arg1),
8301 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8302 build2 (MODIFY_EXPR, void_type_node,
8303 build_fold_indirect_ref_loc (loc, arg2),
8304 fold_build1_loc (loc, REALPART_EXPR, type, call)));
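/* In effect, the fold above rewrites

       sincos (x, &s, &c);

   as (a sketch)

       tmp = cexpi (x);
       s = __imag__ tmp;
       c = __real__ tmp;

   either by constant-folding the cexpi when X is a REAL_CST or by
   emitting a real call to the cexpi builtin when the target's libc has
   the C99 complex math functions.  */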
8307 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8308 Return NULL_TREE if no simplification can be made. */
8310 static tree
8311 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8313 if (!validate_arg (arg1, POINTER_TYPE)
8314 || !validate_arg (arg2, POINTER_TYPE)
8315 || !validate_arg (len, INTEGER_TYPE))
8316 return NULL_TREE;
8318 /* If the LEN parameter is zero, return zero. */
8319 if (integer_zerop (len))
8320 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8321 arg1, arg2);
8323 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8324 if (operand_equal_p (arg1, arg2, 0))
8325 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8327 /* If len parameter is one, return an expression corresponding to
8328 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8329 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8331 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8332 tree cst_uchar_ptr_node
8333 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8335 tree ind1
8336 = fold_convert_loc (loc, integer_type_node,
8337 build1 (INDIRECT_REF, cst_uchar_node,
8338 fold_convert_loc (loc,
8339 cst_uchar_ptr_node,
8340 arg1)));
8341 tree ind2
8342 = fold_convert_loc (loc, integer_type_node,
8343 build1 (INDIRECT_REF, cst_uchar_node,
8344 fold_convert_loc (loc,
8345 cst_uchar_ptr_node,
8346 arg2)));
8347 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8350 return NULL_TREE;
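/* Worked examples of the folds above:

       memcmp (p, q, 0)  -> 0, still evaluating P and Q for side effects;
       memcmp (p, p, n)  -> 0;
       memcmp (p, q, 1)  -> (int) *(const unsigned char *) p
			    - (int) *(const unsigned char *) q,

   so comparing "a" against "b" with length 1 folds to -1.  */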
8353 /* Fold a call to builtin isascii with argument ARG. */
8355 static tree
8356 fold_builtin_isascii (location_t loc, tree arg)
8358 if (!validate_arg (arg, INTEGER_TYPE))
8359 return NULL_TREE;
8360 else
8362 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8363 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8364 build_int_cst (integer_type_node,
8365 ~ (unsigned HOST_WIDE_INT) 0x7f));
8366 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8367 arg, integer_zero_node);
8371 /* Fold a call to builtin toascii with argument ARG. */
8373 static tree
8374 fold_builtin_toascii (location_t loc, tree arg)
8376 if (!validate_arg (arg, INTEGER_TYPE))
8377 return NULL_TREE;
8379 /* Transform toascii(c) -> (c & 0x7f). */
8380 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8381 build_int_cst (integer_type_node, 0x7f));
8384 /* Fold a call to builtin isdigit with argument ARG. */
8386 static tree
8387 fold_builtin_isdigit (location_t loc, tree arg)
8389 if (!validate_arg (arg, INTEGER_TYPE))
8390 return NULL_TREE;
8391 else
8393 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8394 /* According to the C standard, isdigit is unaffected by locale.
8395 However, it definitely is affected by the target character set. */
8396 unsigned HOST_WIDE_INT target_digit0
8397 = lang_hooks.to_target_charset ('0');
8399 if (target_digit0 == 0)
8400 return NULL_TREE;
8402 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8403 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8404 build_int_cst (unsigned_type_node, target_digit0));
8405 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8406 build_int_cst (unsigned_type_node, 9));
8410 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8412 static tree
8413 fold_builtin_fabs (location_t loc, tree arg, tree type)
8415 if (!validate_arg (arg, REAL_TYPE))
8416 return NULL_TREE;
8418 arg = fold_convert_loc (loc, type, arg);
8419 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8422 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8424 static tree
8425 fold_builtin_abs (location_t loc, tree arg, tree type)
8427 if (!validate_arg (arg, INTEGER_TYPE))
8428 return NULL_TREE;
8430 arg = fold_convert_loc (loc, type, arg);
8431 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8434 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8436 static tree
8437 fold_builtin_carg (location_t loc, tree arg, tree type)
8439 if (validate_arg (arg, COMPLEX_TYPE)
8440 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8442 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8444 if (atan2_fn)
8446 tree new_arg = builtin_save_expr (arg);
8447 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8448 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8449 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8453 return NULL_TREE;
8456 /* Fold a call to builtin frexp; we can assume the base is 2. */
8458 static tree
8459 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8461 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8462 return NULL_TREE;
8464 STRIP_NOPS (arg0);
8466 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8467 return NULL_TREE;
8469 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8471 /* Proceed if a valid pointer type was passed in. */
8472 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8474 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8475 tree frac, exp;
8477 switch (value->cl)
8479 case rvc_zero:
8480 /* For +-0, return (*exp = 0, +-0). */
8481 exp = integer_zero_node;
8482 frac = arg0;
8483 break;
8484 case rvc_nan:
8485 case rvc_inf:
8486 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8487 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8488 case rvc_normal:
8490 /* Since the frexp function always expects base 2, and in
8491 GCC normalized significands are already in the range
8492 [0.5, 1.0), we have exactly what frexp wants. */
8493 REAL_VALUE_TYPE frac_rvt = *value;
8494 SET_REAL_EXP (&frac_rvt, 0);
8495 frac = build_real (rettype, frac_rvt);
8496 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8498 break;
8499 default:
8500 gcc_unreachable ();
8503 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8504 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8505 TREE_SIDE_EFFECTS (arg1) = 1;
8506 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8509 return NULL_TREE;
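/* Worked example of the rvc_normal case above: 4.0 is 0.5 * 2^3 with
   the significand normalized to [0.5, 1.0), so frexp (4.0, &e) folds to
   the compound expression (*e = 3, 0.5).  */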
8512 /* Fold a call to builtin modf. */
8514 static tree
8515 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8517 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8518 return NULL_TREE;
8520 STRIP_NOPS (arg0);
8522 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8523 return NULL_TREE;
8525 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8527 /* Proceed if a valid pointer type was passed in. */
8528 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8530 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8531 REAL_VALUE_TYPE trunc, frac;
8533 switch (value->cl)
8535 case rvc_nan:
8536 case rvc_zero:
8537 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8538 trunc = frac = *value;
8539 break;
8540 case rvc_inf:
8541 /* For +-Inf, return (*arg1 = arg0, +-0). */
8542 frac = dconst0;
8543 frac.sign = value->sign;
8544 trunc = *value;
8545 break;
8546 case rvc_normal:
8547 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8548 real_trunc (&trunc, VOIDmode, value);
8549 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8550 /* If the original number was negative and already
8551 integral, then the fractional part is -0.0. */
8552 if (value->sign && frac.cl == rvc_zero)
8553 frac.sign = value->sign;
8554 break;
8557 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8558 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8559 build_real (rettype, trunc));
8560 TREE_SIDE_EFFECTS (arg1) = 1;
8561 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8562 build_real (rettype, frac));
8565 return NULL_TREE;
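/* Worked examples of the folds above: modf (3.25, &i) becomes
   (*i = 3.0, 0.25), while modf (-2.0, &i) becomes (*i = -2.0, -0.0),
   the negative-and-already-integral case handled explicitly above.  */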
8568 /* Given a location LOC, an interclass builtin function decl FNDECL
8569 and its single argument ARG, return a folded expression computing
8570 the same, or NULL_TREE if we either couldn't or didn't want to fold
8571 (the latter happens if there's an RTL instruction available). */
8573 static tree
8574 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8576 machine_mode mode;
8578 if (!validate_arg (arg, REAL_TYPE))
8579 return NULL_TREE;
8581 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8582 return NULL_TREE;
8584 mode = TYPE_MODE (TREE_TYPE (arg));
8586 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8588 /* If there is no optab, try generic code. */
8589 switch (DECL_FUNCTION_CODE (fndecl))
8591 tree result;
8593 CASE_FLT_FN (BUILT_IN_ISINF):
8595 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8596 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8597 tree type = TREE_TYPE (arg);
8598 REAL_VALUE_TYPE r;
8599 char buf[128];
8601 if (is_ibm_extended)
8603 /* NaN and Inf are encoded in the high-order double value
8604 only. The low-order value is not significant. */
8605 type = double_type_node;
8606 mode = DFmode;
8607 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8609 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8610 real_from_string (&r, buf);
8611 result = build_call_expr (isgr_fn, 2,
8612 fold_build1_loc (loc, ABS_EXPR, type, arg),
8613 build_real (type, r));
8614 return result;
8616 CASE_FLT_FN (BUILT_IN_FINITE):
8617 case BUILT_IN_ISFINITE:
8619 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8620 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8621 tree type = TREE_TYPE (arg);
8622 REAL_VALUE_TYPE r;
8623 char buf[128];
8625 if (is_ibm_extended)
8627 /* NaN and Inf are encoded in the high-order double value
8628 only. The low-order value is not significant. */
8629 type = double_type_node;
8630 mode = DFmode;
8631 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8633 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8634 real_from_string (&r, buf);
8635 result = build_call_expr (isle_fn, 2,
8636 fold_build1_loc (loc, ABS_EXPR, type, arg),
8637 build_real (type, r));
8638 /*result = fold_build2_loc (loc, UNGT_EXPR,
8639 TREE_TYPE (TREE_TYPE (fndecl)),
8640 fold_build1_loc (loc, ABS_EXPR, type, arg),
8641 build_real (type, r));
8642 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8643 TREE_TYPE (TREE_TYPE (fndecl)),
8644 result);*/
8645 return result;
8647 case BUILT_IN_ISNORMAL:
8649 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8650 islessequal(fabs(x),DBL_MAX). */
8651 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8652 tree type = TREE_TYPE (arg);
8653 tree orig_arg, max_exp, min_exp;
8654 machine_mode orig_mode = mode;
8655 REAL_VALUE_TYPE rmax, rmin;
8656 char buf[128];
8658 orig_arg = arg = builtin_save_expr (arg);
8659 if (is_ibm_extended)
8661 /* Use double to test the normal range of IBM extended
8662 precision. Emin for IBM extended precision is
8663 different to emin for IEEE double, being 53 higher
8664 since the low double exponent is at least 53 lower
8665 than the high double exponent. */
8666 type = double_type_node;
8667 mode = DFmode;
8668 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8670 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8672 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8673 real_from_string (&rmax, buf);
8674 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8675 real_from_string (&rmin, buf);
8676 max_exp = build_real (type, rmax);
8677 min_exp = build_real (type, rmin);
8679 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8680 if (is_ibm_extended)
8682 /* Testing the high end of the range is done just using
8683 the high double, using the same test as isfinite().
8684 For the subnormal end of the range we first test the
8685 high double, then if its magnitude is equal to the
8686 limit of 0x1p-969, we test whether the low double is
8687 non-zero and opposite sign to the high double. */
8688 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8689 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8690 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8691 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8692 arg, min_exp);
8693 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8694 complex_double_type_node, orig_arg);
8695 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8696 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8697 tree zero = build_real (type, dconst0);
8698 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8699 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8700 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8701 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8702 fold_build3 (COND_EXPR,
8703 integer_type_node,
8704 hilt, logt, lolt));
8705 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8706 eq_min, ok_lo);
8707 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8708 gt_min, eq_min);
8710 else
8712 tree const isge_fn
8713 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8714 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8716 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8717 max_exp, min_exp);
8718 return result;
8720 default:
8721 break;
8724 return NULL_TREE;
8727 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8728 ARG is the argument for the call. */
8730 static tree
8731 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8733 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8735 if (!validate_arg (arg, REAL_TYPE))
8736 return NULL_TREE;
8738 switch (builtin_index)
8740 case BUILT_IN_ISINF:
8741 if (tree_expr_infinite_p (arg))
8742 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8743 if (!tree_expr_maybe_infinite_p (arg))
8744 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8745 return NULL_TREE;
8747 case BUILT_IN_ISINF_SIGN:
8749 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8750 /* In a boolean context, GCC will fold the inner COND_EXPR to
8751 1. So e.g. "if (isinf_sign(x))" would be folded to just
8752 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8753 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8754 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8755 tree tmp = NULL_TREE;
8757 arg = builtin_save_expr (arg);
8759 if (signbit_fn && isinf_fn)
8761 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8762 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8764 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8765 signbit_call, integer_zero_node);
8766 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8767 isinf_call, integer_zero_node);
8769 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8770 integer_minus_one_node, integer_one_node);
8771 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8772 isinf_call, tmp,
8773 integer_zero_node);
8776 return tmp;
8779 case BUILT_IN_ISFINITE:
8780 if (tree_expr_finite_p (arg))
8781 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8782 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8783 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8784 return NULL_TREE;
8786 case BUILT_IN_ISNAN:
8787 if (tree_expr_nan_p (arg))
8788 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8789 if (!tree_expr_maybe_nan_p (arg))
8790 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8793 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8794 if (is_ibm_extended)
8796 /* NaN and Inf are encoded in the high-order double value
8797 only. The low-order value is not significant. */
8798 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8801 arg = builtin_save_expr (arg);
8802 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8804 default:
8805 gcc_unreachable ();
8809 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8810 This builtin will generate code to return the appropriate floating
8811 point classification depending on the value of the floating point
8812 number passed in. The possible return values must be supplied as
8813 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8814 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8815 one floating point argument, which is "type generic". */
8817 static tree
8818 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8820 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8821 arg, type, res, tmp;
8822 machine_mode mode;
8823 REAL_VALUE_TYPE r;
8824 char buf[128];
8826 /* Verify the required arguments in the original call. */
8827 if (nargs != 6
8828 || !validate_arg (args[0], INTEGER_TYPE)
8829 || !validate_arg (args[1], INTEGER_TYPE)
8830 || !validate_arg (args[2], INTEGER_TYPE)
8831 || !validate_arg (args[3], INTEGER_TYPE)
8832 || !validate_arg (args[4], INTEGER_TYPE)
8833 || !validate_arg (args[5], REAL_TYPE))
8834 return NULL_TREE;
8836 fp_nan = args[0];
8837 fp_infinite = args[1];
8838 fp_normal = args[2];
8839 fp_subnormal = args[3];
8840 fp_zero = args[4];
8841 arg = args[5];
8842 type = TREE_TYPE (arg);
8843 mode = TYPE_MODE (type);
8844 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8846 /* fpclassify(x) ->
8847 isnan(x) ? FP_NAN :
8848 (fabs(x) == Inf ? FP_INFINITE :
8849 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8850 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8852 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8853 build_real (type, dconst0));
8854 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8855 tmp, fp_zero, fp_subnormal);
8857 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8858 real_from_string (&r, buf);
8859 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8860 arg, build_real (type, r));
8861 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8863 if (tree_expr_maybe_infinite_p (arg))
8865 real_inf (&r);
8866 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8867 build_real (type, r));
8868 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8869 fp_infinite, res);
8872 if (tree_expr_maybe_nan_p (arg))
8874 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8875 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8878 return res;
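/* Added illustrative sketch (not part of the original source): the argument
   order expected by the builtin folded above; the FP_* macros are assumed
   to come from the user's <math.h>.  */
#if 0
#define fpclassify_example(x) \
  __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL, \
                        FP_ZERO, (x))
#endif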
8881 /* Fold a call to an unordered comparison function such as
8882 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8883 being called and ARG0 and ARG1 are the arguments for the call.
8884 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8885 the opposite of the desired result. UNORDERED_CODE is used
8886 for modes that can hold NaNs and ORDERED_CODE is used for
8887 the rest. */
8889 static tree
8890 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8891 enum tree_code unordered_code,
8892 enum tree_code ordered_code)
8894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8895 enum tree_code code;
8896 tree type0, type1;
8897 enum tree_code code0, code1;
8898 tree cmp_type = NULL_TREE;
8900 type0 = TREE_TYPE (arg0);
8901 type1 = TREE_TYPE (arg1);
8903 code0 = TREE_CODE (type0);
8904 code1 = TREE_CODE (type1);
8906 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8907 /* Choose the wider of two real types. */
8908 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8909 ? type0 : type1;
8910 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8911 cmp_type = type0;
8912 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8913 cmp_type = type1;
8915 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8916 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8918 if (unordered_code == UNORDERED_EXPR)
8920 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
8921 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
8922 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
8923 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8924 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8927 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
8928 ? unordered_code : ordered_code;
8929 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8930 fold_build2_loc (loc, code, type, arg0, arg1));
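/* Added illustrative sketch (not part of the original source): for
   __builtin_isgreater the caller passes UNLE_EXPR/LE_EXPR, so the call
   below folds to the negated opposite comparison.  */
#if 0
static int
isgreater_example (double x, double y)
{
  /* Becomes !(x <= y); the unordered variant is used only when either
     operand might be a NaN, so NaN operands are handled quietly.  */
  return __builtin_isgreater (x, y);
}
#endif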
8933 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8934 arithmetic if it can never overflow, or into internal functions that
8935 return both the result of the arithmetic and an overflow boolean flag in
8936 a complex integer result, or some other check for overflow.
8937 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8938 checking part of that. */
8940 static tree
8941 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8942 tree arg0, tree arg1, tree arg2)
8944 enum internal_fn ifn = IFN_LAST;
8945 /* The code of the expression corresponding to the built-in. */
8946 enum tree_code opcode = ERROR_MARK;
8947 bool ovf_only = false;
8949 switch (fcode)
8951 case BUILT_IN_ADD_OVERFLOW_P:
8952 ovf_only = true;
8953 /* FALLTHRU */
8954 case BUILT_IN_ADD_OVERFLOW:
8955 case BUILT_IN_SADD_OVERFLOW:
8956 case BUILT_IN_SADDL_OVERFLOW:
8957 case BUILT_IN_SADDLL_OVERFLOW:
8958 case BUILT_IN_UADD_OVERFLOW:
8959 case BUILT_IN_UADDL_OVERFLOW:
8960 case BUILT_IN_UADDLL_OVERFLOW:
8961 opcode = PLUS_EXPR;
8962 ifn = IFN_ADD_OVERFLOW;
8963 break;
8964 case BUILT_IN_SUB_OVERFLOW_P:
8965 ovf_only = true;
8966 /* FALLTHRU */
8967 case BUILT_IN_SUB_OVERFLOW:
8968 case BUILT_IN_SSUB_OVERFLOW:
8969 case BUILT_IN_SSUBL_OVERFLOW:
8970 case BUILT_IN_SSUBLL_OVERFLOW:
8971 case BUILT_IN_USUB_OVERFLOW:
8972 case BUILT_IN_USUBL_OVERFLOW:
8973 case BUILT_IN_USUBLL_OVERFLOW:
8974 opcode = MINUS_EXPR;
8975 ifn = IFN_SUB_OVERFLOW;
8976 break;
8977 case BUILT_IN_MUL_OVERFLOW_P:
8978 ovf_only = true;
8979 /* FALLTHRU */
8980 case BUILT_IN_MUL_OVERFLOW:
8981 case BUILT_IN_SMUL_OVERFLOW:
8982 case BUILT_IN_SMULL_OVERFLOW:
8983 case BUILT_IN_SMULLL_OVERFLOW:
8984 case BUILT_IN_UMUL_OVERFLOW:
8985 case BUILT_IN_UMULL_OVERFLOW:
8986 case BUILT_IN_UMULLL_OVERFLOW:
8987 opcode = MULT_EXPR;
8988 ifn = IFN_MUL_OVERFLOW;
8989 break;
8990 default:
8991 gcc_unreachable ();
8994 /* For the "generic" overloads, the first two arguments can have different
8995 types and the last argument determines the target type to use to check
8996 for overflow. The arguments of the other overloads all have the same
8997 type. */
8998 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9000 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9001 arguments are constant, attempt to fold the built-in call into a constant
9002 expression indicating whether or not it detected an overflow. */
9003 if (ovf_only
9004 && TREE_CODE (arg0) == INTEGER_CST
9005 && TREE_CODE (arg1) == INTEGER_CST)
9006 /* Perform the computation in the target type and check for overflow. */
9007 return omit_one_operand_loc (loc, boolean_type_node,
9008 arith_overflowed_p (opcode, type, arg0, arg1)
9009 ? boolean_true_node : boolean_false_node,
9010 arg2);
9012 tree intres, ovfres;
9013 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9015 intres = fold_binary_loc (loc, opcode, type,
9016 fold_convert_loc (loc, type, arg0),
9017 fold_convert_loc (loc, type, arg1));
9018 if (TREE_OVERFLOW (intres))
9019 intres = drop_tree_overflow (intres);
9020 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9021 ? boolean_true_node : boolean_false_node);
9023 else
9025 tree ctype = build_complex_type (type);
9026 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9027 arg0, arg1);
9028 tree tgt = save_expr (call);
9029 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9030 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9031 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9034 if (ovf_only)
9035 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9037 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9038 tree store
9039 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9040 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
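/* Added illustrative sketch (not part of the original source): the two
   flavours of overflow builtin handled above.  */
#if 0
static _Bool
overflow_example (int a, int b, int *res)
{
  /* The _p form only checks, in the type of its third argument; with
     constant operands it folds to a boolean constant as done above.  */
  if (__builtin_add_overflow_p (a, b, (int) 0))
    return 1;
  /* The plain form also stores the (possibly wrapped) result in *res.  */
  return __builtin_add_overflow (a, b, res);
}
#endif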
9043 /* Fold a call to __builtin_FILE to a constant string. */
9045 static inline tree
9046 fold_builtin_FILE (location_t loc)
9048 if (const char *fname = LOCATION_FILE (loc))
9050 /* The documentation says this builtin is equivalent to the preprocessor
9051 __FILE__ macro so it appears appropriate to use the same file prefix
9052 mappings. */
9053 fname = remap_macro_filename (fname);
9054 return build_string_literal (strlen (fname) + 1, fname);
9057 return build_string_literal (1, "");
9060 /* Fold a call to __builtin_FUNCTION to a constant string. */
9062 static inline tree
9063 fold_builtin_FUNCTION ()
9065 const char *name = "";
9067 if (current_function_decl)
9068 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9070 return build_string_literal (strlen (name) + 1, name);
9073 /* Fold a call to __builtin_LINE to an integer constant. */
9075 static inline tree
9076 fold_builtin_LINE (location_t loc, tree type)
9078 return build_int_cst (type, LOCATION_LINE (loc));
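/* Added illustrative sketch (not part of the original source): the three
   builtins folded above yield constants describing the location LOC of
   the call, mirroring __FILE__, __func__ and __LINE__.  */
#if 0
static void
location_example (void)
{
  const char *file = __builtin_FILE ();
  const char *func = __builtin_FUNCTION ();
  int line = __builtin_LINE ();
  (void) file; (void) func; (void) line;
}
#endif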
9081 /* Fold a call to built-in function FNDECL with 0 arguments.
9082 This function returns NULL_TREE if no simplification was possible. */
9084 static tree
9085 fold_builtin_0 (location_t loc, tree fndecl)
9087 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9088 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9089 switch (fcode)
9091 case BUILT_IN_FILE:
9092 return fold_builtin_FILE (loc);
9094 case BUILT_IN_FUNCTION:
9095 return fold_builtin_FUNCTION ();
9097 case BUILT_IN_LINE:
9098 return fold_builtin_LINE (loc, type);
9100 CASE_FLT_FN (BUILT_IN_INF):
9101 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9102 case BUILT_IN_INFD32:
9103 case BUILT_IN_INFD64:
9104 case BUILT_IN_INFD128:
9105 return fold_builtin_inf (loc, type, true);
9107 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9108 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9109 return fold_builtin_inf (loc, type, false);
9111 case BUILT_IN_CLASSIFY_TYPE:
9112 return fold_builtin_classify_type (NULL_TREE);
9114 default:
9115 break;
9117 return NULL_TREE;
9120 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9121 This function returns NULL_TREE if no simplification was possible. */
9123 static tree
9124 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9126 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9127 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9129 if (TREE_CODE (arg0) == ERROR_MARK)
9130 return NULL_TREE;
9132 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9133 return ret;
9135 switch (fcode)
9137 case BUILT_IN_CONSTANT_P:
9139 tree val = fold_builtin_constant_p (arg0);
9141 /* Gimplification will pull the CALL_EXPR for the builtin out of
9142 an if condition. When not optimizing, we'll not CSE it back.
9143 To avoid link-error style regressions, return false now.
9144 if (!val && !optimize)
9145 val = integer_zero_node;
9147 return val;
9150 case BUILT_IN_CLASSIFY_TYPE:
9151 return fold_builtin_classify_type (arg0);
9153 case BUILT_IN_STRLEN:
9154 return fold_builtin_strlen (loc, expr, type, arg0);
9156 CASE_FLT_FN (BUILT_IN_FABS):
9157 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9158 case BUILT_IN_FABSD32:
9159 case BUILT_IN_FABSD64:
9160 case BUILT_IN_FABSD128:
9161 return fold_builtin_fabs (loc, arg0, type);
9163 case BUILT_IN_ABS:
9164 case BUILT_IN_LABS:
9165 case BUILT_IN_LLABS:
9166 case BUILT_IN_IMAXABS:
9167 return fold_builtin_abs (loc, arg0, type);
9169 CASE_FLT_FN (BUILT_IN_CONJ):
9170 if (validate_arg (arg0, COMPLEX_TYPE)
9171 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9172 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9173 break;
9175 CASE_FLT_FN (BUILT_IN_CREAL):
9176 if (validate_arg (arg0, COMPLEX_TYPE)
9177 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9178 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9179 break;
9181 CASE_FLT_FN (BUILT_IN_CIMAG):
9182 if (validate_arg (arg0, COMPLEX_TYPE)
9183 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9184 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9185 break;
9187 CASE_FLT_FN (BUILT_IN_CARG):
9188 return fold_builtin_carg (loc, arg0, type);
9190 case BUILT_IN_ISASCII:
9191 return fold_builtin_isascii (loc, arg0);
9193 case BUILT_IN_TOASCII:
9194 return fold_builtin_toascii (loc, arg0);
9196 case BUILT_IN_ISDIGIT:
9197 return fold_builtin_isdigit (loc, arg0);
9199 CASE_FLT_FN (BUILT_IN_FINITE):
9200 case BUILT_IN_FINITED32:
9201 case BUILT_IN_FINITED64:
9202 case BUILT_IN_FINITED128:
9203 case BUILT_IN_ISFINITE:
9205 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9206 if (ret)
9207 return ret;
9208 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9211 CASE_FLT_FN (BUILT_IN_ISINF):
9212 case BUILT_IN_ISINFD32:
9213 case BUILT_IN_ISINFD64:
9214 case BUILT_IN_ISINFD128:
9216 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9217 if (ret)
9218 return ret;
9219 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9222 case BUILT_IN_ISNORMAL:
9223 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9225 case BUILT_IN_ISINF_SIGN:
9226 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9228 CASE_FLT_FN (BUILT_IN_ISNAN):
9229 case BUILT_IN_ISNAND32:
9230 case BUILT_IN_ISNAND64:
9231 case BUILT_IN_ISNAND128:
9232 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9234 case BUILT_IN_FREE:
9235 if (integer_zerop (arg0))
9236 return build_empty_stmt (loc);
9237 break;
9239 default:
9240 break;
9243 return NULL_TREE;
9247 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9248 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9249 if no simplification was possible. */
9251 static tree
9252 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9254 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9255 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9257 if (TREE_CODE (arg0) == ERROR_MARK
9258 || TREE_CODE (arg1) == ERROR_MARK)
9259 return NULL_TREE;
9261 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9262 return ret;
9264 switch (fcode)
9266 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9267 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9268 if (validate_arg (arg0, REAL_TYPE)
9269 && validate_arg (arg1, POINTER_TYPE))
9270 return do_mpfr_lgamma_r (arg0, arg1, type);
9271 break;
9273 CASE_FLT_FN (BUILT_IN_FREXP):
9274 return fold_builtin_frexp (loc, arg0, arg1, type);
9276 CASE_FLT_FN (BUILT_IN_MODF):
9277 return fold_builtin_modf (loc, arg0, arg1, type);
9279 case BUILT_IN_STRSPN:
9280 return fold_builtin_strspn (loc, expr, arg0, arg1);
9282 case BUILT_IN_STRCSPN:
9283 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9285 case BUILT_IN_STRPBRK:
9286 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9288 case BUILT_IN_EXPECT:
9289 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9291 case BUILT_IN_ISGREATER:
9292 return fold_builtin_unordered_cmp (loc, fndecl,
9293 arg0, arg1, UNLE_EXPR, LE_EXPR);
9294 case BUILT_IN_ISGREATEREQUAL:
9295 return fold_builtin_unordered_cmp (loc, fndecl,
9296 arg0, arg1, UNLT_EXPR, LT_EXPR);
9297 case BUILT_IN_ISLESS:
9298 return fold_builtin_unordered_cmp (loc, fndecl,
9299 arg0, arg1, UNGE_EXPR, GE_EXPR);
9300 case BUILT_IN_ISLESSEQUAL:
9301 return fold_builtin_unordered_cmp (loc, fndecl,
9302 arg0, arg1, UNGT_EXPR, GT_EXPR);
9303 case BUILT_IN_ISLESSGREATER:
9304 return fold_builtin_unordered_cmp (loc, fndecl,
9305 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9306 case BUILT_IN_ISUNORDERED:
9307 return fold_builtin_unordered_cmp (loc, fndecl,
9308 arg0, arg1, UNORDERED_EXPR,
9309 NOP_EXPR);
9311 /* We do the folding for va_start in the expander. */
9312 case BUILT_IN_VA_START:
9313 break;
9315 case BUILT_IN_OBJECT_SIZE:
9316 return fold_builtin_object_size (arg0, arg1);
9318 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9319 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9321 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9322 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9324 default:
9325 break;
9327 return NULL_TREE;
9330 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9331 and ARG2.
9332 This function returns NULL_TREE if no simplification was possible. */
9334 static tree
9335 fold_builtin_3 (location_t loc, tree fndecl,
9336 tree arg0, tree arg1, tree arg2)
9338 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9339 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9341 if (TREE_CODE (arg0) == ERROR_MARK
9342 || TREE_CODE (arg1) == ERROR_MARK
9343 || TREE_CODE (arg2) == ERROR_MARK)
9344 return NULL_TREE;
9346 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9347 arg0, arg1, arg2))
9348 return ret;
9350 switch (fcode)
9353 CASE_FLT_FN (BUILT_IN_SINCOS):
9354 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9356 CASE_FLT_FN (BUILT_IN_REMQUO):
9357 if (validate_arg (arg0, REAL_TYPE)
9358 && validate_arg (arg1, REAL_TYPE)
9359 && validate_arg (arg2, POINTER_TYPE))
9360 return do_mpfr_remquo (arg0, arg1, arg2);
9361 break;
9363 case BUILT_IN_MEMCMP:
9364 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9366 case BUILT_IN_EXPECT:
9367 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9369 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9370 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9372 case BUILT_IN_ADD_OVERFLOW:
9373 case BUILT_IN_SUB_OVERFLOW:
9374 case BUILT_IN_MUL_OVERFLOW:
9375 case BUILT_IN_ADD_OVERFLOW_P:
9376 case BUILT_IN_SUB_OVERFLOW_P:
9377 case BUILT_IN_MUL_OVERFLOW_P:
9378 case BUILT_IN_SADD_OVERFLOW:
9379 case BUILT_IN_SADDL_OVERFLOW:
9380 case BUILT_IN_SADDLL_OVERFLOW:
9381 case BUILT_IN_SSUB_OVERFLOW:
9382 case BUILT_IN_SSUBL_OVERFLOW:
9383 case BUILT_IN_SSUBLL_OVERFLOW:
9384 case BUILT_IN_SMUL_OVERFLOW:
9385 case BUILT_IN_SMULL_OVERFLOW:
9386 case BUILT_IN_SMULLL_OVERFLOW:
9387 case BUILT_IN_UADD_OVERFLOW:
9388 case BUILT_IN_UADDL_OVERFLOW:
9389 case BUILT_IN_UADDLL_OVERFLOW:
9390 case BUILT_IN_USUB_OVERFLOW:
9391 case BUILT_IN_USUBL_OVERFLOW:
9392 case BUILT_IN_USUBLL_OVERFLOW:
9393 case BUILT_IN_UMUL_OVERFLOW:
9394 case BUILT_IN_UMULL_OVERFLOW:
9395 case BUILT_IN_UMULLL_OVERFLOW:
9396 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9398 default:
9399 break;
9401 return NULL_TREE;
9404 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9405 ARGS is an array of NARGS arguments. IGNORE is true if the result
9406 of the function call is ignored. This function returns NULL_TREE
9407 if no simplification was possible. */
9409 static tree
9410 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9411 int nargs, bool)
9413 tree ret = NULL_TREE;
9415 switch (nargs)
9417 case 0:
9418 ret = fold_builtin_0 (loc, fndecl);
9419 break;
9420 case 1:
9421 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9422 break;
9423 case 2:
9424 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9425 break;
9426 case 3:
9427 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9428 break;
9429 default:
9430 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9431 break;
9433 if (ret)
9435 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9436 SET_EXPR_LOCATION (ret, loc);
9437 return ret;
9439 return NULL_TREE;
9442 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9443 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9444 of arguments in ARGS to be omitted. OLDNARGS is the number of
9445 elements in ARGS. */
9447 static tree
9448 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9449 int skip, tree fndecl, int n, va_list newargs)
9451 int nargs = oldnargs - skip + n;
9452 tree *buffer;
9454 if (n > 0)
9456 int i, j;
9458 buffer = XALLOCAVEC (tree, nargs);
9459 for (i = 0; i < n; i++)
9460 buffer[i] = va_arg (newargs, tree);
9461 for (j = skip; j < oldnargs; j++, i++)
9462 buffer[i] = args[j];
9464 else
9465 buffer = args + skip;
9467 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9470 /* Return true if FNDECL shouldn't be folded right now.
9471 If a built-in function has an inline attribute always_inline
9472 wrapper, defer folding it until after always_inline functions have
9473 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9474 might not be performed. */
9476 bool
9477 avoid_folding_inline_builtin (tree fndecl)
9479 return (DECL_DECLARED_INLINE_P (fndecl)
9480 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9481 && cfun
9482 && !cfun->always_inline_functions_inlined
9483 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9486 /* A wrapper function for builtin folding that prevents warnings for
9487 "statement without effect" and the like, caused by removing the
9488 call node earlier than the warning is generated. */
9490 tree
9491 fold_call_expr (location_t loc, tree exp, bool ignore)
9493 tree ret = NULL_TREE;
9494 tree fndecl = get_callee_fndecl (exp);
9495 if (fndecl && fndecl_built_in_p (fndecl)
9496 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9497 yet. Defer folding until we see all the arguments
9498 (after inlining). */
9499 && !CALL_EXPR_VA_ARG_PACK (exp))
9501 int nargs = call_expr_nargs (exp);
9503 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9504 instead last argument is __builtin_va_arg_pack (). Defer folding
9505 even in that case, until arguments are finalized. */
9506 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9508 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9509 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9510 return NULL_TREE;
9513 if (avoid_folding_inline_builtin (fndecl))
9514 return NULL_TREE;
9516 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9517 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9518 CALL_EXPR_ARGP (exp), ignore);
9519 else
9521 tree *args = CALL_EXPR_ARGP (exp);
9522 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9523 if (ret)
9524 return ret;
9527 return NULL_TREE;
9530 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9531 N arguments are passed in the array ARGARRAY. Return a folded
9532 expression or NULL_TREE if no simplification was possible. */
9534 tree
9535 fold_builtin_call_array (location_t loc, tree,
9536 tree fn,
9537 int n,
9538 tree *argarray)
9540 if (TREE_CODE (fn) != ADDR_EXPR)
9541 return NULL_TREE;
9543 tree fndecl = TREE_OPERAND (fn, 0);
9544 if (TREE_CODE (fndecl) == FUNCTION_DECL
9545 && fndecl_built_in_p (fndecl))
9547 /* If last argument is __builtin_va_arg_pack (), arguments to this
9548 function are not finalized yet. Defer folding until they are. */
9549 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9551 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9552 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9553 return NULL_TREE;
9555 if (avoid_folding_inline_builtin (fndecl))
9556 return NULL_TREE;
9557 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9558 return targetm.fold_builtin (fndecl, n, argarray, false);
9559 else
9560 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9563 return NULL_TREE;
9566 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9567 along with N new arguments specified as the "..." parameters. SKIP
9568 is the number of arguments in EXP to be omitted. This function is used
9569 to do varargs-to-varargs transformations. */
9571 static tree
9572 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9574 va_list ap;
9575 tree t;
9577 va_start (ap, n);
9578 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9579 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9580 va_end (ap);
9582 return t;
9585 /* Validate a single argument ARG against a tree code CODE representing
9586 a type. Return true when argument is valid. */
9588 static bool
9589 validate_arg (const_tree arg, enum tree_code code)
9591 if (!arg)
9592 return false;
9593 else if (code == POINTER_TYPE)
9594 return POINTER_TYPE_P (TREE_TYPE (arg));
9595 else if (code == INTEGER_TYPE)
9596 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9597 return code == TREE_CODE (TREE_TYPE (arg));
9600 /* This function validates the types of a function call argument list
9601 against a specified list of tree_codes. If the last specifier is a 0,
9602 that represents an ellipsis, otherwise the last specifier must be a
9603 VOID_TYPE.
9605 This is the GIMPLE version of validate_arglist. Eventually we want to
9606 completely convert builtins.c to work from GIMPLEs and the tree based
9607 validate_arglist will then be removed. */
9609 bool
9610 validate_gimple_arglist (const gcall *call, ...)
9612 enum tree_code code;
9613 bool res = 0;
9614 va_list ap;
9615 const_tree arg;
9616 size_t i;
9618 va_start (ap, call);
9619 i = 0;
9621 do
9623 code = (enum tree_code) va_arg (ap, int);
9624 switch (code)
9626 case 0:
9627 /* This signifies an ellipsis; any further arguments are all ok. */
9628 res = true;
9629 goto end;
9630 case VOID_TYPE:
9631 /* This signifies an endlink, if no arguments remain, return
9632 true, otherwise return false. */
9633 res = (i == gimple_call_num_args (call));
9634 goto end;
9635 default:
9636 /* If no parameters remain or the parameter's code does not
9637 match the specified code, return false. Otherwise continue
9638 checking any remaining arguments. */
9639 arg = gimple_call_arg (call, i++);
9640 if (!validate_arg (arg, code))
9641 goto end;
9642 break;
9645 while (1);
9647 /* We need gotos here since we can only have one VA_CLOSE in a
9648 function. */
9649 end: ;
9650 va_end (ap);
9652 return res;
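/* Added illustrative sketch (not part of the original source; the helper
   name is hypothetical): a typical specifier list, terminated by
   VOID_TYPE, or by 0 to accept arbitrary trailing arguments.  */
#if 0
static bool
arglist_example (const gcall *call)
{
  /* Accept exactly one floating point and one pointer argument.  */
  return validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE);
}
#endif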
9655 /* Default target-specific builtin expander that does nothing. */
9657 rtx
9658 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9659 rtx target ATTRIBUTE_UNUSED,
9660 rtx subtarget ATTRIBUTE_UNUSED,
9661 machine_mode mode ATTRIBUTE_UNUSED,
9662 int ignore ATTRIBUTE_UNUSED)
9664 return NULL_RTX;
9667 /* Returns true if EXP represents data that would potentially reside
9668 in a readonly section. */
9670 bool
9671 readonly_data_expr (tree exp)
9673 STRIP_NOPS (exp);
9675 if (TREE_CODE (exp) != ADDR_EXPR)
9676 return false;
9678 exp = get_base_address (TREE_OPERAND (exp, 0));
9679 if (!exp)
9680 return false;
9682 /* Make sure we call decl_readonly_section only for trees it
9683 can handle (since it returns true for everything it doesn't
9684 understand). */
9685 if (TREE_CODE (exp) == STRING_CST
9686 || TREE_CODE (exp) == CONSTRUCTOR
9687 || (VAR_P (exp) && TREE_STATIC (exp)))
9688 return decl_readonly_section (exp, 0);
9689 else
9690 return false;
9693 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9694 to the call, and TYPE is its return type.
9696 Return NULL_TREE if no simplification was possible, otherwise return the
9697 simplified form of the call as a tree.
9699 The simplified form may be a constant or other expression which
9700 computes the same value, but in a more efficient manner (including
9701 calls to other builtin functions).
9703 The call may contain arguments which need to be evaluated, but
9704 which are not useful to determine the result of the call. In
9705 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9706 COMPOUND_EXPR will be an argument which must be evaluated.
9707 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9708 COMPOUND_EXPR in the chain will contain the tree for the simplified
9709 form of the builtin function call. */
9711 static tree
9712 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9714 if (!validate_arg (s1, POINTER_TYPE)
9715 || !validate_arg (s2, POINTER_TYPE))
9716 return NULL_TREE;
9718 tree fn;
9719 const char *p1, *p2;
9721 p2 = c_getstr (s2);
9722 if (p2 == NULL)
9723 return NULL_TREE;
9725 p1 = c_getstr (s1);
9726 if (p1 != NULL)
9728 const char *r = strpbrk (p1, p2);
9729 tree tem;
9731 if (r == NULL)
9732 return build_int_cst (TREE_TYPE (s1), 0);
9734 /* Return an offset into the constant string argument. */
9735 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9736 return fold_convert_loc (loc, type, tem);
9739 if (p2[0] == '\0')
9740 /* strpbrk(x, "") == NULL.
9741 Evaluate and ignore s1 in case it had side-effects. */
9742 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9744 if (p2[1] != '\0')
9745 return NULL_TREE; /* Really call strpbrk. */
9747 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9748 if (!fn)
9749 return NULL_TREE;
9751 /* New argument list transforming strpbrk(s1, s2) to
9752 strchr(s1, s2[0]). */
9753 return build_call_expr_loc (loc, fn, 2, s1,
9754 build_int_cst (integer_type_node, p2[0]));
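/* Added illustrative sketch (not part of the original source): the strpbrk
   simplifications performed above.  */
#if 0
static void
strpbrk_example (const char *s)
{
  char *a = __builtin_strpbrk (s, "");   /* folds to a null pointer */
  char *b = __builtin_strpbrk (s, "/");  /* becomes strchr (s, '/') */
  (void) a; (void) b;
}
#endif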
9757 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9758 to the call.
9760 Return NULL_TREE if no simplification was possible, otherwise return the
9761 simplified form of the call as a tree.
9763 The simplified form may be a constant or other expression which
9764 computes the same value, but in a more efficient manner (including
9765 calls to other builtin functions).
9767 The call may contain arguments which need to be evaluated, but
9768 which are not useful to determine the result of the call. In
9769 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9770 COMPOUND_EXPR will be an argument which must be evaluated.
9771 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9772 COMPOUND_EXPR in the chain will contain the tree for the simplified
9773 form of the builtin function call. */
9775 static tree
9776 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9778 if (!validate_arg (s1, POINTER_TYPE)
9779 || !validate_arg (s2, POINTER_TYPE))
9780 return NULL_TREE;
9782 if (!check_nul_terminated_array (expr, s1)
9783 || !check_nul_terminated_array (expr, s2))
9784 return NULL_TREE;
9786 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9788 /* If either argument is "", the result is 0. */
9789 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9790 /* Evaluate and ignore both arguments in case either one has
9791 side-effects. */
9792 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9793 s1, s2);
9794 return NULL_TREE;
9797 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9798 to the call.
9800 Return NULL_TREE if no simplification was possible, otherwise return the
9801 simplified form of the call as a tree.
9803 The simplified form may be a constant or other expression which
9804 computes the same value, but in a more efficient manner (including
9805 calls to other builtin functions).
9807 The call may contain arguments which need to be evaluated, but
9808 which are not useful to determine the result of the call. In
9809 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9810 COMPOUND_EXPR will be an argument which must be evaluated.
9811 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9812 COMPOUND_EXPR in the chain will contain the tree for the simplified
9813 form of the builtin function call. */
9815 static tree
9816 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9818 if (!validate_arg (s1, POINTER_TYPE)
9819 || !validate_arg (s2, POINTER_TYPE))
9820 return NULL_TREE;
9822 if (!check_nul_terminated_array (expr, s1)
9823 || !check_nul_terminated_array (expr, s2))
9824 return NULL_TREE;
9826 /* If the first argument is "", the result is 0. */
9827 const char *p1 = c_getstr (s1);
9828 if (p1 && *p1 == '\0')
9830 /* Evaluate and ignore argument s2 in case it has
9831 side-effects. */
9832 return omit_one_operand_loc (loc, size_type_node,
9833 size_zero_node, s2);
9836 /* If the second argument is "", return __builtin_strlen(s1). */
9837 const char *p2 = c_getstr (s2);
9838 if (p2 && *p2 == '\0')
9840 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9842 /* If the replacement _DECL isn't initialized, don't do the
9843 transformation. */
9844 if (!fn)
9845 return NULL_TREE;
9847 return build_call_expr_loc (loc, fn, 1, s1);
9849 return NULL_TREE;
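/* Added illustrative sketch (not part of the original source): the strspn
   and strcspn simplifications performed above.  */
#if 0
static void
spn_example (const char *s)
{
  __SIZE_TYPE__ a = __builtin_strspn (s, "");   /* folds to 0 */
  __SIZE_TYPE__ b = __builtin_strcspn ("", s);  /* folds to 0 */
  __SIZE_TYPE__ c = __builtin_strcspn (s, "");  /* becomes strlen (s) */
  (void) a; (void) b; (void) c;
}
#endif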
9852 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
9853 produced, false otherwise.  This is done so that we don't output the error
9854 or warning twice or three times. */
9856 bool
9857 fold_builtin_next_arg (tree exp, bool va_start_p)
9859 tree fntype = TREE_TYPE (current_function_decl);
9860 int nargs = call_expr_nargs (exp);
9861 tree arg;
9862 /* There is a good chance the current input_location points inside the
9863 definition of the va_start macro (perhaps on the token for
9864 the builtin) in a system header, so warnings will not be emitted.
9865 Use the location in real source code. */
9866 location_t current_location =
9867 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9868 NULL);
9870 if (!stdarg_p (fntype))
9872 error ("%<va_start%> used in function with fixed arguments");
9873 return true;
9876 if (va_start_p)
9878 if (va_start_p && (nargs != 2))
9880 error ("wrong number of arguments to function %<va_start%>");
9881 return true;
9883 arg = CALL_EXPR_ARG (exp, 1);
9885 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9886 when we checked the arguments and if needed issued a warning. */
9887 else
9889 if (nargs == 0)
9891 /* Evidently an out of date version of <stdarg.h>; can't validate
9892 va_start's second argument, but can still work as intended. */
9893 warning_at (current_location,
9894 OPT_Wvarargs,
9895 "%<__builtin_next_arg%> called without an argument");
9896 return true;
9898 else if (nargs > 1)
9900 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9901 return true;
9903 arg = CALL_EXPR_ARG (exp, 0);
9906 if (TREE_CODE (arg) == SSA_NAME
9907 && SSA_NAME_VAR (arg))
9908 arg = SSA_NAME_VAR (arg);
9910 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9911 or __builtin_next_arg (0) the first time we see it, after checking
9912 the arguments and if needed issuing a warning. */
9913 if (!integer_zerop (arg))
9915 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9917 /* Strip off all nops for the sake of the comparison. This
9918 is not quite the same as STRIP_NOPS. It does more.
9919 We must also strip off INDIRECT_EXPR for C++ reference
9920 parameters. */
9921 while (CONVERT_EXPR_P (arg)
9922 || TREE_CODE (arg) == INDIRECT_REF)
9923 arg = TREE_OPERAND (arg, 0);
9924 if (arg != last_parm)
9926 /* FIXME: Sometimes with the tree optimizers we can get something
9927 other than the last argument even though the user used the last
9928 argument.  We just warn and set the arg to be the last
9929 argument so that we will get wrong-code because of
9930 it. */
9931 warning_at (current_location,
9932 OPT_Wvarargs,
9933 "second parameter of %<va_start%> not last named argument");
9936 /* Undefined by C99 7.15.1.4p4 (va_start):
9937 "If the parameter parmN is declared with the register storage
9938 class, with a function or array type, or with a type that is
9939 not compatible with the type that results after application of
9940 the default argument promotions, the behavior is undefined."
9941 */
9942 else if (DECL_REGISTER (arg))
9944 warning_at (current_location,
9945 OPT_Wvarargs,
9946 "undefined behavior when second parameter of "
9947 "%<va_start%> is declared with %<register%> storage");
9950 /* We want to verify the second parameter just once before the tree
9951 optimizers are run and then avoid keeping it in the tree,
9952 as otherwise we could warn even for correct code like:
9953 void foo (int i, ...)
9954 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9955 if (va_start_p)
9956 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9957 else
9958 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9960 return false;
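/* Added illustrative sketch (not part of the original source): a call that
   triggers the -Wvarargs diagnostic issued above, because the second
   va_start argument is not the last named parameter.  */
#if 0
#include <stdarg.h>
static int
va_start_example (int first, int last, ...)
{
  va_list ap;
  va_start (ap, first);   /* warning: not the last named argument */
  int v = va_arg (ap, int);
  va_end (ap);
  return v;
}
#endif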
9964 /* Expand a call EXP to __builtin_object_size. */
9966 static rtx
9967 expand_builtin_object_size (tree exp)
9969 tree ost;
9970 int object_size_type;
9971 tree fndecl = get_callee_fndecl (exp);
9973 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9975 error ("first argument of %qD must be a pointer, second integer constant",
9976 fndecl);
9977 expand_builtin_trap ();
9978 return const0_rtx;
9981 ost = CALL_EXPR_ARG (exp, 1);
9982 STRIP_NOPS (ost);
9984 if (TREE_CODE (ost) != INTEGER_CST
9985 || tree_int_cst_sgn (ost) < 0
9986 || compare_tree_int (ost, 3) > 0)
9988 error ("last argument of %qD is not integer constant between 0 and 3",
9989 fndecl);
9990 expand_builtin_trap ();
9991 return const0_rtx;
9994 object_size_type = tree_to_shwi (ost);
9996 return object_size_type < 2 ? constm1_rtx : const0_rtx;
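/* Added illustrative sketch (not part of the original source): when the
   pointed-to object is known, __builtin_object_size folds to its size;
   otherwise the expansion above yields (size_t) -1 for types 0 and 1 and
   0 for types 2 and 3.  */
#if 0
static void
object_size_example (void)
{
  char buf[16];
  __SIZE_TYPE__ max = __builtin_object_size (buf, 0);  /* 16 */
  __SIZE_TYPE__ min = __builtin_object_size (buf, 2);  /* 16 */
  (void) max; (void) min;
}
#endif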
9999 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10000 FCODE is the BUILT_IN_* to use.
10001 Return NULL_RTX if we failed; the caller should emit a normal call,
10002 otherwise try to get the result in TARGET, if convenient (and in
10003 mode MODE if that's convenient). */
10005 static rtx
10006 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10007 enum built_in_function fcode)
10009 if (!validate_arglist (exp,
10010 POINTER_TYPE,
10011 fcode == BUILT_IN_MEMSET_CHK
10012 ? INTEGER_TYPE : POINTER_TYPE,
10013 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10014 return NULL_RTX;
10016 tree dest = CALL_EXPR_ARG (exp, 0);
10017 tree src = CALL_EXPR_ARG (exp, 1);
10018 tree len = CALL_EXPR_ARG (exp, 2);
10019 tree size = CALL_EXPR_ARG (exp, 3);
10021 /* FIXME: Set access mode to write only for memset et al. */
10022 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10023 /*srcstr=*/NULL_TREE, size, access_read_write);
10025 if (!tree_fits_uhwi_p (size))
10026 return NULL_RTX;
10028 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10030 /* Avoid transforming the checking call to an ordinary one when
10031 an overflow has been detected or when the call couldn't be
10032 validated because the size is not constant. */
10033 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10034 return NULL_RTX;
10036 tree fn = NULL_TREE;
10037 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10038 mem{cpy,pcpy,move,set} is available. */
10039 switch (fcode)
10041 case BUILT_IN_MEMCPY_CHK:
10042 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10043 break;
10044 case BUILT_IN_MEMPCPY_CHK:
10045 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10046 break;
10047 case BUILT_IN_MEMMOVE_CHK:
10048 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10049 break;
10050 case BUILT_IN_MEMSET_CHK:
10051 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10052 break;
10053 default:
10054 break;
10057 if (! fn)
10058 return NULL_RTX;
10060 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10061 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10062 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10063 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10065 else if (fcode == BUILT_IN_MEMSET_CHK)
10066 return NULL_RTX;
10067 else
10069 unsigned int dest_align = get_pointer_alignment (dest);
10071 /* If DEST is not a pointer type, call the normal function. */
10072 if (dest_align == 0)
10073 return NULL_RTX;
10075 /* If SRC and DEST are the same (and not volatile), do nothing. */
10076 if (operand_equal_p (src, dest, 0))
10078 tree expr;
10080 if (fcode != BUILT_IN_MEMPCPY_CHK)
10082 /* Evaluate and ignore LEN in case it has side-effects. */
10083 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10084 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10087 expr = fold_build_pointer_plus (dest, len);
10088 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10091 /* __memmove_chk special case. */
10092 if (fcode == BUILT_IN_MEMMOVE_CHK)
10094 unsigned int src_align = get_pointer_alignment (src);
10096 if (src_align == 0)
10097 return NULL_RTX;
10099 /* If src is categorized for a readonly section we can use
10100 normal __memcpy_chk. */
10101 if (readonly_data_expr (src))
10103 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10104 if (!fn)
10105 return NULL_RTX;
10106 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10107 dest, src, len, size);
10108 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10109 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10110 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10113 return NULL_RTX;
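/* Added illustrative sketch (not part of the original source): the kind of
   fortified call handled above; when the length provably fits the
   destination it is expanded as a plain memcpy, otherwise it is left as a
   call to __memcpy_chk (and may draw a warning).  */
#if 0
static void
memcpy_chk_example (char *dst, const char *src)
{
  __builtin___memcpy_chk (dst, src, 8, __builtin_object_size (dst, 0));
}
#endif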
10117 /* Emit warning if a buffer overflow is detected at compile time. */
10119 static void
10120 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10122 /* The source string. */
10123 tree srcstr = NULL_TREE;
10124 /* The size of the destination object returned by __builtin_object_size. */
10125 tree objsize = NULL_TREE;
10126 /* The string that is being concatenated with (as in __strcat_chk)
10127 or null if it isn't. */
10128 tree catstr = NULL_TREE;
10129 /* The maximum length of the source sequence in a bounded operation
10130 (such as __strncat_chk) or null if the operation isn't bounded
10131 (such as __strcat_chk). */
10132 tree maxread = NULL_TREE;
10133 /* The exact size of the access (such as in __strncpy_chk). */
10134 tree size = NULL_TREE;
10135 /* The access by the function that's checked. Except for snprintf
10136 both writing and reading are checked. */
10137 access_mode mode = access_read_write;
10139 switch (fcode)
10141 case BUILT_IN_STRCPY_CHK:
10142 case BUILT_IN_STPCPY_CHK:
10143 srcstr = CALL_EXPR_ARG (exp, 1);
10144 objsize = CALL_EXPR_ARG (exp, 2);
10145 break;
10147 case BUILT_IN_STRCAT_CHK:
10148 /* For __strcat_chk the warning will be emitted only if overflowing
10149 by at least strlen (dest) + 1 bytes. */
10150 catstr = CALL_EXPR_ARG (exp, 0);
10151 srcstr = CALL_EXPR_ARG (exp, 1);
10152 objsize = CALL_EXPR_ARG (exp, 2);
10153 break;
10155 case BUILT_IN_STRNCAT_CHK:
10156 catstr = CALL_EXPR_ARG (exp, 0);
10157 srcstr = CALL_EXPR_ARG (exp, 1);
10158 maxread = CALL_EXPR_ARG (exp, 2);
10159 objsize = CALL_EXPR_ARG (exp, 3);
10160 break;
10162 case BUILT_IN_STRNCPY_CHK:
10163 case BUILT_IN_STPNCPY_CHK:
10164 srcstr = CALL_EXPR_ARG (exp, 1);
10165 size = CALL_EXPR_ARG (exp, 2);
10166 objsize = CALL_EXPR_ARG (exp, 3);
10167 break;
10169 case BUILT_IN_SNPRINTF_CHK:
10170 case BUILT_IN_VSNPRINTF_CHK:
10171 maxread = CALL_EXPR_ARG (exp, 1);
10172 objsize = CALL_EXPR_ARG (exp, 3);
10173 /* The only checked access is the write to the destination. */
10174 mode = access_write_only;
10175 break;
10176 default:
10177 gcc_unreachable ();
10180 if (catstr && maxread)
10182 /* Check __strncat_chk. There is no way to determine the length
10183 of the string to which the source string is being appended so
10184 just warn when the length of the source string is not known. */
10185 check_strncat_sizes (exp, objsize);
10186 return;
10189 check_access (exp, size, maxread, srcstr, objsize, mode);
10192 /* Emit warning if a buffer overflow is detected at compile time
10193 in __sprintf_chk/__vsprintf_chk calls. */
10195 static void
10196 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10198 tree size, len, fmt;
10199 const char *fmt_str;
10200 int nargs = call_expr_nargs (exp);
10202 /* Verify the required arguments in the original call. */
10204 if (nargs < 4)
10205 return;
10206 size = CALL_EXPR_ARG (exp, 2);
10207 fmt = CALL_EXPR_ARG (exp, 3);
10209 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10210 return;
10212 /* Check whether the format is a literal string constant. */
10213 fmt_str = c_getstr (fmt);
10214 if (fmt_str == NULL)
10215 return;
10217 if (!init_target_chars ())
10218 return;
10220 /* If the format doesn't contain % args or %%, we know its size. */
10221 if (strchr (fmt_str, target_percent) == 0)
10222 len = build_int_cstu (size_type_node, strlen (fmt_str));
10223 /* If the format is "%s" and the first ... argument is a string literal,
10224 we know it too. */
10225 else if (fcode == BUILT_IN_SPRINTF_CHK
10226 && strcmp (fmt_str, target_percent_s) == 0)
10228 tree arg;
10230 if (nargs < 5)
10231 return;
10232 arg = CALL_EXPR_ARG (exp, 4);
10233 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10234 return;
10236 len = c_strlen (arg, 1);
10237 if (!len || ! tree_fits_uhwi_p (len))
10238 return;
10240 else
10241 return;
10243 /* Add one for the terminating nul. */
10244 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10246 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10247 access_write_only);
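/* Added illustrative sketch (not part of the original source): a call where
   the format string alone determines the output length, so the check above
   can diagnose the overflow at compile time.  */
#if 0
static void
sprintf_chk_example (void)
{
  char buf[4];
  /* 11 characters plus the terminating nul exceed the size of 4.  */
  __builtin___sprintf_chk (buf, 0, sizeof buf, "hello world");
}
#endif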
10250 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10251 if possible. */
10253 static tree
10254 fold_builtin_object_size (tree ptr, tree ost)
10256 unsigned HOST_WIDE_INT bytes;
10257 int object_size_type;
10259 if (!validate_arg (ptr, POINTER_TYPE)
10260 || !validate_arg (ost, INTEGER_TYPE))
10261 return NULL_TREE;
10263 STRIP_NOPS (ost);
10265 if (TREE_CODE (ost) != INTEGER_CST
10266 || tree_int_cst_sgn (ost) < 0
10267 || compare_tree_int (ost, 3) > 0)
10268 return NULL_TREE;
10270 object_size_type = tree_to_shwi (ost);
10272 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10273 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10274 and (size_t) 0 for types 2 and 3. */
10275 if (TREE_SIDE_EFFECTS (ptr))
10276 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10278 if (TREE_CODE (ptr) == ADDR_EXPR)
10280 compute_builtin_object_size (ptr, object_size_type, &bytes);
10281 if (wi::fits_to_tree_p (bytes, size_type_node))
10282 return build_int_cstu (size_type_node, bytes);
10284 else if (TREE_CODE (ptr) == SSA_NAME)
10286 /* If object size is not known yet, delay folding until
10287 later.  Maybe subsequent passes will help determine
10288 it. */
10289 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10290 && wi::fits_to_tree_p (bytes, size_type_node))
10291 return build_int_cstu (size_type_node, bytes);
10294 return NULL_TREE;
10297 /* Builtins with folding operations that operate on "..." arguments
10298 need special handling; we need to store the arguments in a convenient
10299 data structure before attempting any folding. Fortunately there are
10300 only a few builtins that fall into this category.  FNDECL is the
10301 function, and ARGS is an array of its NARGS arguments. */
10303 static tree
10304 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10306 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10307 tree ret = NULL_TREE;
10309 switch (fcode)
10311 case BUILT_IN_FPCLASSIFY:
10312 ret = fold_builtin_fpclassify (loc, args, nargs);
10313 break;
10315 default:
10316 break;
10318 if (ret)
10320 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10321 SET_EXPR_LOCATION (ret, loc);
10322 suppress_warning (ret);
10323 return ret;
10325 return NULL_TREE;
10328 /* Initialize format string characters in the target charset. */
10330 bool
10331 init_target_chars (void)
10333 static bool init;
10334 if (!init)
10336 target_newline = lang_hooks.to_target_charset ('\n');
10337 target_percent = lang_hooks.to_target_charset ('%');
10338 target_c = lang_hooks.to_target_charset ('c');
10339 target_s = lang_hooks.to_target_charset ('s');
10340 if (target_newline == 0 || target_percent == 0 || target_c == 0
10341 || target_s == 0)
10342 return false;
10344 target_percent_c[0] = target_percent;
10345 target_percent_c[1] = target_c;
10346 target_percent_c[2] = '\0';
10348 target_percent_s[0] = target_percent;
10349 target_percent_s[1] = target_s;
10350 target_percent_s[2] = '\0';
10352 target_percent_s_newline[0] = target_percent;
10353 target_percent_s_newline[1] = target_s;
10354 target_percent_s_newline[2] = target_newline;
10355 target_percent_s_newline[3] = '\0';
10357 init = true;
10359 return true;
10362 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10363 and no overflow/underflow occurred. INEXACT is true if M was not
10364 exactly calculated. TYPE is the tree type for the result. This
10365 function assumes that you cleared the MPFR flags and then
10366 calculated M to see if anything subsequently set a flag prior to
10367 entering this function. Return NULL_TREE if any checks fail. */
10369 static tree
10370 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10372 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10373 overflow/underflow occurred. If -frounding-math, proceed iff the
10374 result of calling FUNC was exact. */
10375 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10376 && (!flag_rounding_math || !inexact))
10378 REAL_VALUE_TYPE rr;
10380 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10381 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10382 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10383 but the mpfr_t is not, then we underflowed in the
10384 conversion. */
10385 if (real_isfinite (&rr)
10386 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10388 REAL_VALUE_TYPE rmode;
10390 real_convert (&rmode, TYPE_MODE (type), &rr);
10391 /* Proceed iff the specified mode can hold the value. */
10392 if (real_identical (&rmode, &rr))
10393 return build_real (type, rmode);
10396 return NULL_TREE;
10399 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10400 number and no overflow/underflow occurred. INEXACT is true if M
10401 was not exactly calculated. TYPE is the tree type for the result.
10402 This function assumes that you cleared the MPFR flags and then
10403 calculated M to see if anything subsequently set a flag prior to
10404 entering this function. Return NULL_TREE if any checks fail, if
10405 FORCE_CONVERT is true, then bypass the checks. */
10407 static tree
10408 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10410 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10411 overflow/underflow occurred. If -frounding-math, proceed iff the
10412 result of calling FUNC was exact. */
10413 if (force_convert
10414 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10415 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10416 && (!flag_rounding_math || !inexact)))
10418 REAL_VALUE_TYPE re, im;
10420 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10421 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10422 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10423 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10424 but the mpfr_t is not, then we underflowed in the
10425 conversion. */
10426 if (force_convert
10427 || (real_isfinite (&re) && real_isfinite (&im)
10428 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10429 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10431 REAL_VALUE_TYPE re_mode, im_mode;
10433 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10434 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10435 /* Proceed iff the specified mode can hold the value. */
10436 if (force_convert
10437 || (real_identical (&re_mode, &re)
10438 && real_identical (&im_mode, &im)))
10439 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10440 build_real (TREE_TYPE (type), im_mode));
10443 return NULL_TREE;
10446 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10447 the pointer *(ARG_QUO) and return the result. The type is taken
10448 from the type of ARG0 and is used for setting the precision of the
10449 calculation and results. */
10451 static tree
10452 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10454 tree const type = TREE_TYPE (arg0);
10455 tree result = NULL_TREE;
10457 STRIP_NOPS (arg0);
10458 STRIP_NOPS (arg1);
10460 /* To proceed, MPFR must exactly represent the target floating point
10461 format, which only happens when the target base equals two. */
10462 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10463 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10464 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10466 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10467 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10469 if (real_isfinite (ra0) && real_isfinite (ra1))
10471 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10472 const int prec = fmt->p;
10473 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10474 tree result_rem;
10475 long integer_quo;
10476 mpfr_t m0, m1;
10478 mpfr_inits2 (prec, m0, m1, NULL);
10479 mpfr_from_real (m0, ra0, MPFR_RNDN);
10480 mpfr_from_real (m1, ra1, MPFR_RNDN);
10481 mpfr_clear_flags ();
10482 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10483 /* Remquo is independent of the rounding mode, so pass
10484 inexact=0 to do_mpfr_ckconv(). */
10485 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10486 mpfr_clears (m0, m1, NULL);
10487 if (result_rem)
10489 /* MPFR calculates quo in the host's long so it may
10490 return more bits in quo than the target int can hold
10491 if sizeof(host long) > sizeof(target int). This can
10492 happen even for native compilers in LP64 mode. In
10493 these cases, reduce the quo value modulo the largest
10494 number that the target int can hold while leaving one
10495 bit for the sign. */
10496 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10497 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10499 /* Dereference the quo pointer argument. */
10500 arg_quo = build_fold_indirect_ref (arg_quo);
10501 /* Proceed iff a valid pointer type was passed in. */
10502 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10504 /* Set the value. */
10505 tree result_quo
10506 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10507 build_int_cst (TREE_TYPE (arg_quo),
10508 integer_quo));
10509 TREE_SIDE_EFFECTS (result_quo) = 1;
10510 /* Combine the quo assignment with the rem. */
10511 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10512 result_quo, result_rem));
10517 return result;
10520 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10521 resulting value as a tree with type TYPE. The mpfr precision is
10522 set to the precision of TYPE. We assume that this mpfr function
10523 returns zero if the result could be calculated exactly within the
10524 requested precision. In addition, the integer pointer represented
10525 by ARG_SG will be dereferenced and set to the appropriate signgam
10526 (-1,1) value. */
10528 static tree
10529 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10531 tree result = NULL_TREE;
10533 STRIP_NOPS (arg);
10535 /* To proceed, MPFR must exactly represent the target floating point
10536 format, which only happens when the target base equals two. Also
10537 verify ARG is a constant and that ARG_SG is an int pointer. */
10538 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10539 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10540 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10541 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10543 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10545 /* In addition to NaN and Inf, the argument cannot be zero or a
10546 negative integer. */
10547 if (real_isfinite (ra)
10548 && ra->cl != rvc_zero
10549 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10551 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10552 const int prec = fmt->p;
10553 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10554 int inexact, sg;
10555 mpfr_t m;
10556 tree result_lg;
10558 mpfr_init2 (m, prec);
10559 mpfr_from_real (m, ra, MPFR_RNDN);
10560 mpfr_clear_flags ();
10561 inexact = mpfr_lgamma (m, &sg, m, rnd);
10562 result_lg = do_mpfr_ckconv (m, type, inexact);
10563 mpfr_clear (m);
10564 if (result_lg)
10566 tree result_sg;
10568 /* Dereference the arg_sg pointer argument. */
10569 arg_sg = build_fold_indirect_ref (arg_sg);
10570 /* Assign the signgam value into *arg_sg. */
10571 result_sg = fold_build2 (MODIFY_EXPR,
10572 TREE_TYPE (arg_sg), arg_sg,
10573 build_int_cst (TREE_TYPE (arg_sg), sg));
10574 TREE_SIDE_EFFECTS (result_sg) = 1;
10575 /* Combine the signgam assignment with the lgamma result. */
10576 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10577 result_sg, result_lg));
10582 return result;
10585 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10586 mpc function FUNC on it and return the resulting value as a tree
10587 with type TYPE. The mpfr precision is set to the precision of
10588 TYPE. We assume that function FUNC returns zero if the result
10589 could be calculated exactly within the requested precision. If
10590 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10591 in the arguments and/or results. */
10593 tree
10594 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10595 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10597 tree result = NULL_TREE;
10599 STRIP_NOPS (arg0);
10600 STRIP_NOPS (arg1);
10602 /* To proceed, MPFR must exactly represent the target floating point
10603 format, which only happens when the target base equals two. */
10604 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10605 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10606 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10607 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10608 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10610 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10611 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10612 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10613 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10615 if (do_nonfinite
10616 || (real_isfinite (re0) && real_isfinite (im0)
10617 && real_isfinite (re1) && real_isfinite (im1)))
10618 {
10619 const struct real_format *const fmt =
10620 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10621 const int prec = fmt->p;
10622 const mpfr_rnd_t rnd = fmt->round_towards_zero
10623 ? MPFR_RNDZ : MPFR_RNDN;
10624 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10625 int inexact;
10626 mpc_t m0, m1;
10628 mpc_init2 (m0, prec);
10629 mpc_init2 (m1, prec);
10630 mpfr_from_real (mpc_realref (m0), re0, rnd);
10631 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10632 mpfr_from_real (mpc_realref (m1), re1, rnd);
10633 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10634 mpfr_clear_flags ();
10635 inexact = func (m0, m0, m1, crnd);
10636 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10637 mpc_clear (m0);
10638 mpc_clear (m1);
10639 }
10640 }
10642 return result;
10643 }
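/* Editorial addition (illustrative sketch, not part of the original file):
   mpc_pow is the kind of two-argument MPC function that can be passed as
   FUNC above; the standalone program below evaluates (1+2i)**(3+0i) at the
   precision of IEEE double (53 bits, an assumption for the example).  */
#if 0
#include <stdio.h>
#include <mpfr.h>
#include <mpc.h>

int
main (void)
{
  mpc_t z, w;
  int inexact;

  mpc_init2 (z, 53);
  mpc_init2 (w, 53);
  mpc_set_d_d (z, 1.0, 2.0, MPC_RNDNN);
  mpc_set_d_d (w, 3.0, 0.0, MPC_RNDNN);
  /* z = z ** w, rounded to nearest in both parts.  */
  inexact = mpc_pow (z, z, w, MPC_RNDNN);
  printf ("(1+2i)**3 = %.17g + %.17g*i, inexact = %d\n",
          mpfr_get_d (mpc_realref (z), MPFR_RNDN),
          mpfr_get_d (mpc_imagref (z), MPFR_RNDN), inexact);
  mpc_clear (z);
  mpc_clear (w);
  return 0;
}
#endif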
10645 /* A wrapper function for builtin folding that prevents warnings for
10646 "statement without effect" and the like, caused by removing the
10647 call node earlier than the warning is generated. */
10649 tree
10650 fold_call_stmt (gcall *stmt, bool ignore)
10651 {
10652 tree ret = NULL_TREE;
10653 tree fndecl = gimple_call_fndecl (stmt);
10654 location_t loc = gimple_location (stmt);
10655 if (fndecl && fndecl_built_in_p (fndecl)
10656 && !gimple_call_va_arg_pack_p (stmt))
10657 {
10658 int nargs = gimple_call_num_args (stmt);
10659 tree *args = (nargs > 0
10660 ? gimple_call_arg_ptr (stmt, 0)
10661 : &error_mark_node);
10663 if (avoid_folding_inline_builtin (fndecl))
10664 return NULL_TREE;
10665 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10666 {
10667 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10668 }
10669 else
10670 {
10671 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
10672 if (ret)
10673 {
10674 /* Propagate location information from original call to
10675 expansion of builtin. Otherwise things like
10676 maybe_emit_chk_warning, that operate on the expansion
10677 of a builtin, will use the wrong location information. */
10678 if (gimple_has_location (stmt))
10679 {
10680 tree realret = ret;
10681 if (TREE_CODE (ret) == NOP_EXPR)
10682 realret = TREE_OPERAND (ret, 0);
10683 if (CAN_HAVE_LOCATION_P (realret)
10684 && !EXPR_HAS_LOCATION (realret))
10685 SET_EXPR_LOCATION (realret, loc);
10686 return realret;
10687 }
10688 return ret;
10689 }
10690 }
10691 }
10692 return NULL_TREE;
10693 }
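/* Editorial addition (hedged usage sketch, not part of the original file):
   a hypothetical caller that holds a builtin call statement CALL can ask
   for a folded replacement tree as below; because of the location
   propagation above, diagnostics issued for the replacement still point
   at the original call site.  */
#if 0
static tree
try_fold_builtin_call (gcall *call)
{
  /* IGNORE would be true if the caller discards the return value.  */
  tree folded = fold_call_stmt (call, /*ignore=*/false);
  return folded;  /* NULL_TREE when nothing could be folded.  */
}
#endif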
10695 /* Look up the function in builtin_decl that corresponds to DECL
10696 and set ASMSPEC as its user assembler name. DECL must be a
10697 function decl that declares a builtin. */
10699 void
10700 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10701 {
10702 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10703 && asmspec != 0);
10705 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10706 set_user_assembler_name (builtin, asmspec);
10708 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10709 && INT_TYPE_SIZE < BITS_PER_WORD)
10710 {
10711 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10712 set_user_assembler_libfunc ("ffs", asmspec);
10713 set_optab_libfunc (ffs_optab, mode, "ffs");
10714 }
10715 }
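/* Editorial addition (illustrative sketch, not part of the original file):
   user code reaches set_builtin_user_assembler_name through an asm label
   on a builtin's declaration, e.g. the hypothetical renaming below.  On a
   target where int is narrower than a word, the ffs optab libcall has to
   follow the rename as well, which is the special case handled above.  */
#if 0
extern int ffs (int) __asm__ ("my_ffs");

int
lowest_set_bit (int x)
{
  return ffs (x);  /* expands inline or calls the renamed "my_ffs".  */
}
#endif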
10717 /* Return true if DECL is a builtin that expands to a constant or similarly
10718 simple code. */
10719 bool
10720 is_simple_builtin (tree decl)
10721 {
10722 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10723 switch (DECL_FUNCTION_CODE (decl))
10724 {
10725 /* Builtins that expand to constants. */
10726 case BUILT_IN_CONSTANT_P:
10727 case BUILT_IN_EXPECT:
10728 case BUILT_IN_OBJECT_SIZE:
10729 case BUILT_IN_UNREACHABLE:
10730 /* Simple register moves or loads from stack. */
10731 case BUILT_IN_ASSUME_ALIGNED:
10732 case BUILT_IN_RETURN_ADDRESS:
10733 case BUILT_IN_EXTRACT_RETURN_ADDR:
10734 case BUILT_IN_FROB_RETURN_ADDR:
10735 case BUILT_IN_RETURN:
10736 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10737 case BUILT_IN_FRAME_ADDRESS:
10738 case BUILT_IN_VA_END:
10739 case BUILT_IN_STACK_SAVE:
10740 case BUILT_IN_STACK_RESTORE:
10741 /* Exception state returns or moves registers around. */
10742 case BUILT_IN_EH_FILTER:
10743 case BUILT_IN_EH_POINTER:
10744 case BUILT_IN_EH_COPY_VALUES:
10745 return true;
10747 default:
10748 return false;
10749 }
10751 return false;
10752 }
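/* Editorial addition (illustrative sketch, not part of the original file):
   the builtins classified as "simple" above expand to constants or trivial
   register/stack accesses rather than real calls, for example:  */
#if 0
int
example (int x)
{
  if (__builtin_constant_p (x) && x == 0)      /* folds to a constant.  */
    return 0;
  return __builtin_return_address (0) != 0;    /* a simple load/move.  */
}
#endif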
10754 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10755 most probably expanded inline into reasonably simple code. This is a
10756 superset of is_simple_builtin. */
10757 bool
10758 is_inexpensive_builtin (tree decl)
10759 {
10760 if (!decl)
10761 return false;
10762 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10763 return true;
10764 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10765 switch (DECL_FUNCTION_CODE (decl))
10766 {
10767 case BUILT_IN_ABS:
10768 CASE_BUILT_IN_ALLOCA:
10769 case BUILT_IN_BSWAP16:
10770 case BUILT_IN_BSWAP32:
10771 case BUILT_IN_BSWAP64:
10772 case BUILT_IN_BSWAP128:
10773 case BUILT_IN_CLZ:
10774 case BUILT_IN_CLZIMAX:
10775 case BUILT_IN_CLZL:
10776 case BUILT_IN_CLZLL:
10777 case BUILT_IN_CTZ:
10778 case BUILT_IN_CTZIMAX:
10779 case BUILT_IN_CTZL:
10780 case BUILT_IN_CTZLL:
10781 case BUILT_IN_FFS:
10782 case BUILT_IN_FFSIMAX:
10783 case BUILT_IN_FFSL:
10784 case BUILT_IN_FFSLL:
10785 case BUILT_IN_IMAXABS:
10786 case BUILT_IN_FINITE:
10787 case BUILT_IN_FINITEF:
10788 case BUILT_IN_FINITEL:
10789 case BUILT_IN_FINITED32:
10790 case BUILT_IN_FINITED64:
10791 case BUILT_IN_FINITED128:
10792 case BUILT_IN_FPCLASSIFY:
10793 case BUILT_IN_ISFINITE:
10794 case BUILT_IN_ISINF_SIGN:
10795 case BUILT_IN_ISINF:
10796 case BUILT_IN_ISINFF:
10797 case BUILT_IN_ISINFL:
10798 case BUILT_IN_ISINFD32:
10799 case BUILT_IN_ISINFD64:
10800 case BUILT_IN_ISINFD128:
10801 case BUILT_IN_ISNAN:
10802 case BUILT_IN_ISNANF:
10803 case BUILT_IN_ISNANL:
10804 case BUILT_IN_ISNAND32:
10805 case BUILT_IN_ISNAND64:
10806 case BUILT_IN_ISNAND128:
10807 case BUILT_IN_ISNORMAL:
10808 case BUILT_IN_ISGREATER:
10809 case BUILT_IN_ISGREATEREQUAL:
10810 case BUILT_IN_ISLESS:
10811 case BUILT_IN_ISLESSEQUAL:
10812 case BUILT_IN_ISLESSGREATER:
10813 case BUILT_IN_ISUNORDERED:
10814 case BUILT_IN_VA_ARG_PACK:
10815 case BUILT_IN_VA_ARG_PACK_LEN:
10816 case BUILT_IN_VA_COPY:
10817 case BUILT_IN_TRAP:
10818 case BUILT_IN_SAVEREGS:
10819 case BUILT_IN_POPCOUNTL:
10820 case BUILT_IN_POPCOUNTLL:
10821 case BUILT_IN_POPCOUNTIMAX:
10822 case BUILT_IN_POPCOUNT:
10823 case BUILT_IN_PARITYL:
10824 case BUILT_IN_PARITYLL:
10825 case BUILT_IN_PARITYIMAX:
10826 case BUILT_IN_PARITY:
10827 case BUILT_IN_LABS:
10828 case BUILT_IN_LLABS:
10829 case BUILT_IN_PREFETCH:
10830 case BUILT_IN_ACC_ON_DEVICE:
10831 return true;
10833 default:
10834 return is_simple_builtin (decl);
10835 }
10837 return false;
10838 }
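/* Editorial addition (illustrative sketch, not part of the original file):
   typical "inexpensive" builtins from the list above usually expand to a
   single instruction or a short inline sequence, so treating calls to them
   as cheap is reasonable for inlining heuristics:  */
#if 0
unsigned
bit_stats (unsigned x)
{
  /* The |1 guards against __builtin_clz (0), which is undefined.  */
  return __builtin_popcount (x) + (unsigned) __builtin_clz (x | 1u);
}
#endif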
10840 /* Return true if T is a constant and the value cast to a target char
10841 can be represented by a host char.
10842 Store the cast char constant in *P if so. */
10844 bool
10845 target_char_cst_p (tree t, char *p)
10846 {
10847 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10848 return false;
10850 *p = (char)tree_to_uhwi (t);
10851 return true;
10852 }
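/* Editorial addition (illustrative sketch, not part of the original file):
   the check above is part of what lets string builtins be folded on the
   host when their character argument is a constant; e.g. GCC can resolve
   the call below at compile time because 'l' fits in a host char.  */
#if 0
#include <string.h>

const char *
find_l (void)
{
  return strchr ("hello", 'l');  /* may be folded to "hello" + 2.  */
}
#endif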
10854 /* Return true if the builtin DECL is implemented in a standard library.
10855 Otherwise return false, which does not guarantee that it is not
10856 (thus the list of handled builtins below may be incomplete). */
10858 bool
10859 builtin_with_linkage_p (tree decl)
10860 {
10861 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10862 switch (DECL_FUNCTION_CODE (decl))
10863 {
10864 CASE_FLT_FN (BUILT_IN_ACOS):
10865 CASE_FLT_FN (BUILT_IN_ACOSH):
10866 CASE_FLT_FN (BUILT_IN_ASIN):
10867 CASE_FLT_FN (BUILT_IN_ASINH):
10868 CASE_FLT_FN (BUILT_IN_ATAN):
10869 CASE_FLT_FN (BUILT_IN_ATANH):
10870 CASE_FLT_FN (BUILT_IN_ATAN2):
10871 CASE_FLT_FN (BUILT_IN_CBRT):
10872 CASE_FLT_FN (BUILT_IN_CEIL):
10873 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
10874 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10875 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
10876 CASE_FLT_FN (BUILT_IN_COS):
10877 CASE_FLT_FN (BUILT_IN_COSH):
10878 CASE_FLT_FN (BUILT_IN_ERF):
10879 CASE_FLT_FN (BUILT_IN_ERFC):
10880 CASE_FLT_FN (BUILT_IN_EXP):
10881 CASE_FLT_FN (BUILT_IN_EXP2):
10882 CASE_FLT_FN (BUILT_IN_EXPM1):
10883 CASE_FLT_FN (BUILT_IN_FABS):
10884 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10885 CASE_FLT_FN (BUILT_IN_FDIM):
10886 CASE_FLT_FN (BUILT_IN_FLOOR):
10887 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
10888 CASE_FLT_FN (BUILT_IN_FMA):
10889 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
10890 CASE_FLT_FN (BUILT_IN_FMAX):
10891 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
10892 CASE_FLT_FN (BUILT_IN_FMIN):
10893 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
10894 CASE_FLT_FN (BUILT_IN_FMOD):
10895 CASE_FLT_FN (BUILT_IN_FREXP):
10896 CASE_FLT_FN (BUILT_IN_HYPOT):
10897 CASE_FLT_FN (BUILT_IN_ILOGB):
10898 CASE_FLT_FN (BUILT_IN_LDEXP):
10899 CASE_FLT_FN (BUILT_IN_LGAMMA):
10900 CASE_FLT_FN (BUILT_IN_LLRINT):
10901 CASE_FLT_FN (BUILT_IN_LLROUND):
10902 CASE_FLT_FN (BUILT_IN_LOG):
10903 CASE_FLT_FN (BUILT_IN_LOG10):
10904 CASE_FLT_FN (BUILT_IN_LOG1P):
10905 CASE_FLT_FN (BUILT_IN_LOG2):
10906 CASE_FLT_FN (BUILT_IN_LOGB):
10907 CASE_FLT_FN (BUILT_IN_LRINT):
10908 CASE_FLT_FN (BUILT_IN_LROUND):
10909 CASE_FLT_FN (BUILT_IN_MODF):
10910 CASE_FLT_FN (BUILT_IN_NAN):
10911 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10912 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
10913 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
10914 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
10915 CASE_FLT_FN (BUILT_IN_POW):
10916 CASE_FLT_FN (BUILT_IN_REMAINDER):
10917 CASE_FLT_FN (BUILT_IN_REMQUO):
10918 CASE_FLT_FN (BUILT_IN_RINT):
10919 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
10920 CASE_FLT_FN (BUILT_IN_ROUND):
10921 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
10922 CASE_FLT_FN (BUILT_IN_SCALBLN):
10923 CASE_FLT_FN (BUILT_IN_SCALBN):
10924 CASE_FLT_FN (BUILT_IN_SIN):
10925 CASE_FLT_FN (BUILT_IN_SINH):
10926 CASE_FLT_FN (BUILT_IN_SINCOS):
10927 CASE_FLT_FN (BUILT_IN_SQRT):
10928 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
10929 CASE_FLT_FN (BUILT_IN_TAN):
10930 CASE_FLT_FN (BUILT_IN_TANH):
10931 CASE_FLT_FN (BUILT_IN_TGAMMA):
10932 CASE_FLT_FN (BUILT_IN_TRUNC):
10933 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
10934 return true;
10936 case BUILT_IN_STPCPY:
10937 case BUILT_IN_STPNCPY:
10938 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
10939 by libiberty's stpcpy.c for MinGW targets, so we need to return true
10940 in order to be able to build libiberty in LTO mode for them. */
10941 return true;
10943 default:
10944 break;
10945 }
10946 return false;
10947 }
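/* Editorial addition (illustrative sketch, not part of the original file):
   the functions listed above may end up as real calls to the C library
   when they are not expanded inline, so their symbols need linkage; e.g.
   the sqrt below can become either a single instruction or a call to the
   library's sqrt, depending on the target and on errno requirements.  */
#if 0
#include <math.h>

double
hypotenuse (double a, double b)
{
  return sqrt (a * a + b * b);
}
#endif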
10949 /* Return true if OFFRNG is bounded to a subrange of offset values
10950 valid for the largest possible object. */
10952 bool
10953 access_ref::offset_bounded () const
10954 {
10955 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
10956 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
10957 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
10958 }
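/* Editorial addition (worked example, not part of the original file): on
   an LP64 target ptrdiff_t is 64 bits wide, so the test above accepts
   offset ranges contained in [-2^63, 2^63 - 1]; a range such as [0, 4096]
   is "bounded", while one whose upper bound exceeds PTRDIFF_MAX is not.  */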
10960 /* Return the fnspec describing the known side effects of builtin CALLEE,
10961 or an empty fnspec if none are known. See tree-ssa-structalias.c:
10962 find_func_aliases for the list of builtins we might need to handle here. */
10964 attr_fnspec
10965 builtin_fnspec (tree callee)
10966 {
10967 built_in_function code = DECL_FUNCTION_CODE (callee);
10969 switch (code)
10970 {
10971 /* All the following functions read memory pointed to by
10972 their second argument and write memory pointed to by first
10973 argument.
10974 strcat/strncat additionally reads memory pointed to by the first
10975 argument. */
10976 case BUILT_IN_STRCAT:
10977 case BUILT_IN_STRCAT_CHK:
10978 return "1cW 1 ";
10979 case BUILT_IN_STRNCAT:
10980 case BUILT_IN_STRNCAT_CHK:
10981 return "1cW 13";
10982 case BUILT_IN_STRCPY:
10983 case BUILT_IN_STRCPY_CHK:
10984 return "1cO 1 ";
10985 case BUILT_IN_STPCPY:
10986 case BUILT_IN_STPCPY_CHK:
10987 return ".cO 1 ";
10988 case BUILT_IN_STRNCPY:
10989 case BUILT_IN_MEMCPY:
10990 case BUILT_IN_MEMMOVE:
10991 case BUILT_IN_TM_MEMCPY:
10992 case BUILT_IN_TM_MEMMOVE:
10993 case BUILT_IN_STRNCPY_CHK:
10994 case BUILT_IN_MEMCPY_CHK:
10995 case BUILT_IN_MEMMOVE_CHK:
10996 return "1cO313";
10997 case BUILT_IN_MEMPCPY:
10998 case BUILT_IN_MEMPCPY_CHK:
10999 return ".cO313";
11000 case BUILT_IN_STPNCPY:
11001 case BUILT_IN_STPNCPY_CHK:
11002 return ".cO313";
11003 case BUILT_IN_BCOPY:
11004 return ".c23O3";
11005 case BUILT_IN_BZERO:
11006 return ".cO2";
11007 case BUILT_IN_MEMCMP:
11008 case BUILT_IN_MEMCMP_EQ:
11009 case BUILT_IN_BCMP:
11010 case BUILT_IN_STRNCMP:
11011 case BUILT_IN_STRNCMP_EQ:
11012 case BUILT_IN_STRNCASECMP:
11013 return ".cR3R3";
11015 /* The following functions read memory pointed to by their
11016 first argument. */
11017 CASE_BUILT_IN_TM_LOAD (1):
11018 CASE_BUILT_IN_TM_LOAD (2):
11019 CASE_BUILT_IN_TM_LOAD (4):
11020 CASE_BUILT_IN_TM_LOAD (8):
11021 CASE_BUILT_IN_TM_LOAD (FLOAT):
11022 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11023 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11024 CASE_BUILT_IN_TM_LOAD (M64):
11025 CASE_BUILT_IN_TM_LOAD (M128):
11026 CASE_BUILT_IN_TM_LOAD (M256):
11027 case BUILT_IN_TM_LOG:
11028 case BUILT_IN_TM_LOG_1:
11029 case BUILT_IN_TM_LOG_2:
11030 case BUILT_IN_TM_LOG_4:
11031 case BUILT_IN_TM_LOG_8:
11032 case BUILT_IN_TM_LOG_FLOAT:
11033 case BUILT_IN_TM_LOG_DOUBLE:
11034 case BUILT_IN_TM_LOG_LDOUBLE:
11035 case BUILT_IN_TM_LOG_M64:
11036 case BUILT_IN_TM_LOG_M128:
11037 case BUILT_IN_TM_LOG_M256:
11038 return ".cR ";
11040 case BUILT_IN_INDEX:
11041 case BUILT_IN_RINDEX:
11042 case BUILT_IN_STRCHR:
11043 case BUILT_IN_STRLEN:
11044 case BUILT_IN_STRRCHR:
11045 return ".cR ";
11046 case BUILT_IN_STRNLEN:
11047 return ".cR2";
11049 /* These read memory pointed to by the first argument.
11050 Allocating memory does not have any side-effects apart from
11051 being the definition point for the pointer.
11052 Unix98 specifies that errno is set on allocation failure. */
11053 case BUILT_IN_STRDUP:
11054 return "mCR ";
11055 case BUILT_IN_STRNDUP:
11056 return "mCR2";
11057 /* Allocating memory does not have any side-effects apart from
11058 being the definition point for the pointer. */
11059 case BUILT_IN_MALLOC:
11060 case BUILT_IN_ALIGNED_ALLOC:
11061 case BUILT_IN_CALLOC:
11062 case BUILT_IN_GOMP_ALLOC:
11063 return "mC";
11064 CASE_BUILT_IN_ALLOCA:
11065 return "mc";
11066 /* These read memory pointed to by the first argument with size
11067 in the third argument. */
11068 case BUILT_IN_MEMCHR:
11069 return ".cR3";
11070 /* These read memory pointed to by the first and second arguments. */
11071 case BUILT_IN_STRSTR:
11072 case BUILT_IN_STRPBRK:
11073 case BUILT_IN_STRCASECMP:
11074 case BUILT_IN_STRCSPN:
11075 case BUILT_IN_STRSPN:
11076 case BUILT_IN_STRCMP:
11077 case BUILT_IN_STRCMP_EQ:
11078 return ".cR R ";
11079 /* Freeing memory kills the pointed-to memory. More importantly
11080 the call has to serve as a barrier for moving loads and stores
11081 across it. */
11082 case BUILT_IN_STACK_RESTORE:
11083 case BUILT_IN_FREE:
11084 case BUILT_IN_GOMP_FREE:
11085 return ".co ";
11086 case BUILT_IN_VA_END:
11087 return ".cO ";
11088 /* Realloc serves both as allocation point and deallocation point. */
11089 case BUILT_IN_REALLOC:
11090 return ".Cw ";
11091 case BUILT_IN_GAMMA_R:
11092 case BUILT_IN_GAMMAF_R:
11093 case BUILT_IN_GAMMAL_R:
11094 case BUILT_IN_LGAMMA_R:
11095 case BUILT_IN_LGAMMAF_R:
11096 case BUILT_IN_LGAMMAL_R:
11097 return ".C. Ot";
11098 case BUILT_IN_FREXP:
11099 case BUILT_IN_FREXPF:
11100 case BUILT_IN_FREXPL:
11101 case BUILT_IN_MODF:
11102 case BUILT_IN_MODFF:
11103 case BUILT_IN_MODFL:
11104 return ".c. Ot";
11105 case BUILT_IN_REMQUO:
11106 case BUILT_IN_REMQUOF:
11107 case BUILT_IN_REMQUOL:
11108 return ".c. . Ot";
11109 case BUILT_IN_SINCOS:
11110 case BUILT_IN_SINCOSF:
11111 case BUILT_IN_SINCOSL:
11112 return ".c. OtOt";
11113 case BUILT_IN_MEMSET:
11114 case BUILT_IN_MEMSET_CHK:
11115 case BUILT_IN_TM_MEMSET:
11116 return "1cO3";
11117 CASE_BUILT_IN_TM_STORE (1):
11118 CASE_BUILT_IN_TM_STORE (2):
11119 CASE_BUILT_IN_TM_STORE (4):
11120 CASE_BUILT_IN_TM_STORE (8):
11121 CASE_BUILT_IN_TM_STORE (FLOAT):
11122 CASE_BUILT_IN_TM_STORE (DOUBLE):
11123 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11124 CASE_BUILT_IN_TM_STORE (M64):
11125 CASE_BUILT_IN_TM_STORE (M128):
11126 CASE_BUILT_IN_TM_STORE (M256):
11127 return ".cO ";
11128 case BUILT_IN_STACK_SAVE:
11129 return ".c";
11130 case BUILT_IN_ASSUME_ALIGNED:
11131 return "1cX ";
11132 /* But posix_memalign stores a pointer into the memory pointed to
11133 by its first argument. */
11134 case BUILT_IN_POSIX_MEMALIGN:
11135 return ".cOt";
11137 default:
11138 return "";