1 /* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
96 {
97 #include "builtins.def"
98 };
100 /* Set up an array of builtin_info_type, make sure each element decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static rtx get_memory_rtx (tree, tree);
109 static int apply_args_size (void);
110 static int apply_result_size (void);
111 static rtx result_vector (int, rtx);
112 static void expand_builtin_prefetch (tree);
113 static rtx expand_builtin_apply_args (void);
114 static rtx expand_builtin_apply_args_1 (void);
115 static rtx expand_builtin_apply (rtx, rtx, rtx);
116 static void expand_builtin_return (rtx);
117 static enum type_class type_to_class (tree);
118 static rtx expand_builtin_classify_type (tree);
119 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
120 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
121 static rtx expand_builtin_interclass_mathfn (tree, rtx);
122 static rtx expand_builtin_sincos (tree);
123 static rtx expand_builtin_cexpi (tree, rtx);
124 static rtx expand_builtin_int_roundingfn (tree, rtx);
125 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
126 static rtx expand_builtin_next_arg (void);
127 static rtx expand_builtin_va_start (tree);
128 static rtx expand_builtin_va_end (tree);
129 static rtx expand_builtin_va_copy (tree);
130 static rtx inline_expand_builtin_bytecmp (tree, rtx);
131 static rtx expand_builtin_strcmp (tree, rtx);
132 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
133 static rtx expand_builtin_memcpy (tree, rtx);
134 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
135 rtx target, tree exp,
136 memop_ret retmode,
137 bool might_overlap);
138 static rtx expand_builtin_memmove (tree, rtx);
139 static rtx expand_builtin_mempcpy (tree, rtx);
140 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
141 static rtx expand_builtin_strcpy (tree, rtx);
142 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
143 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
144 static rtx expand_builtin_strncpy (tree, rtx);
145 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
146 static rtx expand_builtin_bzero (tree);
147 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
148 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
149 static rtx expand_builtin_alloca (tree);
150 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
151 static rtx expand_builtin_frame_address (tree, tree);
152 static tree stabilize_va_list_loc (location_t, tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static rtx expand_builtin_expect_with_probability (tree, rtx);
155 static tree fold_builtin_constant_p (tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (location_t, tree, tree, tree);
158 static tree fold_builtin_inf (location_t, tree, int);
159 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
160 static bool validate_arg (const_tree, enum tree_code code);
161 static rtx expand_builtin_fabs (tree, rtx, rtx);
162 static rtx expand_builtin_signbit (tree, rtx);
163 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
164 static tree fold_builtin_isascii (location_t, tree);
165 static tree fold_builtin_toascii (location_t, tree);
166 static tree fold_builtin_isdigit (location_t, tree);
167 static tree fold_builtin_fabs (location_t, tree, tree);
168 static tree fold_builtin_abs (location_t, tree, tree);
169 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
170 enum tree_code);
171 static tree fold_builtin_varargs (location_t, tree, tree*, int);
173 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_strspn (location_t, tree, tree, tree);
175 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
177 static rtx expand_builtin_object_size (tree);
178 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
179 enum built_in_function);
180 static void maybe_emit_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
182 static tree fold_builtin_object_size (tree, tree);
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
195 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
197 static bool
198 is_builtin_name (const char *name)
200 return (startswith (name, "__builtin_")
201 || startswith (name, "__sync_")
202 || startswith (name, "__atomic_"));
205 /* Return true if NODE should be considered for inline expansion regardless
206 of the optimization level. This means whenever a function is invoked with
207 its "internal" name, which normally contains the prefix "__builtin". */
209 bool
210 called_as_built_in (tree node)
212 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
213 we want the name used to call the function, not the name it
214 will have. */
215 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
216 return is_builtin_name (name);
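/* Illustrative examples (not part of the original source):
     is_builtin_name ("__builtin_memcpy")       returns true
     is_builtin_name ("__sync_fetch_and_add_4") returns true
     is_builtin_name ("__atomic_load_8")        returns true
     is_builtin_name ("memcpy")                 returns false
   so a call spelled __builtin_memcpy is considered for inline expansion
   regardless of the optimization level, while a plain memcpy call is not
   given that special treatment.  */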
219 /* Compute values M and N such that M divides (address of EXP - N) and such
220 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
221 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
222 *ALIGNP and any bit-offset to *BITPOSP.
224 Note that the address (and thus the alignment) computed here is based
225 on the address to which a symbol resolves, whereas DECL_ALIGN is based
226 on the address at which an object is actually located. These two
227 addresses are not always the same. For example, on ARM targets,
228 the address &foo of a Thumb function foo() has the lowest bit set,
229 whereas foo() itself starts on an even address.
231 If ADDR_P is true we are taking the address of the memory reference EXP
232 and thus cannot rely on the access taking place. */
234 static bool
235 get_object_alignment_2 (tree exp, unsigned int *alignp,
236 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
238 poly_int64 bitsize, bitpos;
239 tree offset;
240 machine_mode mode;
241 int unsignedp, reversep, volatilep;
242 unsigned int align = BITS_PER_UNIT;
243 bool known_alignment = false;
245 /* Get the innermost object and the constant (bitpos) and possibly
246 variable (offset) offset of the access. */
247 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
248 &unsignedp, &reversep, &volatilep);
250 /* Extract alignment information from the innermost object and
251 possibly adjust bitpos and offset. */
252 if (TREE_CODE (exp) == FUNCTION_DECL)
254 /* Function addresses can encode extra information besides their
255 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
256 allows the low bit to be used as a virtual bit, we know
257 that the address itself must be at least 2-byte aligned. */
258 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
259 align = 2 * BITS_PER_UNIT;
261 else if (TREE_CODE (exp) == LABEL_DECL)
263 else if (TREE_CODE (exp) == CONST_DECL)
265 /* The alignment of a CONST_DECL is determined by its initializer. */
266 exp = DECL_INITIAL (exp);
267 align = TYPE_ALIGN (TREE_TYPE (exp));
268 if (CONSTANT_CLASS_P (exp))
269 align = targetm.constant_alignment (exp, align);
271 known_alignment = true;
273 else if (DECL_P (exp))
275 align = DECL_ALIGN (exp);
276 known_alignment = true;
278 else if (TREE_CODE (exp) == INDIRECT_REF
279 || TREE_CODE (exp) == MEM_REF
280 || TREE_CODE (exp) == TARGET_MEM_REF)
282 tree addr = TREE_OPERAND (exp, 0);
283 unsigned ptr_align;
284 unsigned HOST_WIDE_INT ptr_bitpos;
285 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
287 /* If the address is explicitly aligned, handle that. */
288 if (TREE_CODE (addr) == BIT_AND_EXPR
289 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
291 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
292 ptr_bitmask *= BITS_PER_UNIT;
293 align = least_bit_hwi (ptr_bitmask);
294 addr = TREE_OPERAND (addr, 0);
297 known_alignment
298 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
299 align = MAX (ptr_align, align);
301 /* Re-apply explicit alignment to the bitpos. */
302 ptr_bitpos &= ptr_bitmask;
304 /* The alignment of the pointer operand in a TARGET_MEM_REF
305 has to take the variable offset parts into account. */
306 if (TREE_CODE (exp) == TARGET_MEM_REF)
308 if (TMR_INDEX (exp))
310 unsigned HOST_WIDE_INT step = 1;
311 if (TMR_STEP (exp))
312 step = TREE_INT_CST_LOW (TMR_STEP (exp));
313 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
315 if (TMR_INDEX2 (exp))
316 align = BITS_PER_UNIT;
317 known_alignment = false;
320 /* When EXP is an actual memory reference then we can use
321 TYPE_ALIGN of a pointer indirection to derive alignment.
322 Do so only if get_pointer_alignment_1 did not reveal absolute
323 alignment knowledge and if using that alignment would
324 improve the situation. */
325 unsigned int talign;
326 if (!addr_p && !known_alignment
327 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
328 && talign > align)
329 align = talign;
330 else
332 /* Else adjust bitpos accordingly. */
333 bitpos += ptr_bitpos;
334 if (TREE_CODE (exp) == MEM_REF
335 || TREE_CODE (exp) == TARGET_MEM_REF)
336 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
339 else if (TREE_CODE (exp) == STRING_CST)
341 /* STRING_CST are the only constant objects we allow to be not
342 wrapped inside a CONST_DECL. */
343 align = TYPE_ALIGN (TREE_TYPE (exp));
344 if (CONSTANT_CLASS_P (exp))
345 align = targetm.constant_alignment (exp, align);
347 known_alignment = true;
350 /* If there is a non-constant offset part extract the maximum
351 alignment that can prevail. */
352 if (offset)
354 unsigned int trailing_zeros = tree_ctz (offset);
355 if (trailing_zeros < HOST_BITS_PER_INT)
357 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
358 if (inner)
359 align = MIN (align, inner);
363 /* Account for the alignment of runtime coefficients, so that the constant
364 bitpos is guaranteed to be accurate. */
365 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
366 if (alt_align != 0 && alt_align < align)
368 align = alt_align;
369 known_alignment = false;
372 *alignp = align;
373 *bitposp = bitpos.coeffs[0] & (align - 1);
374 return known_alignment;
377 /* For a memory reference expression EXP compute values M and N such that M
378 divides (&EXP - N) and such that N < M. If these numbers can be determined,
379 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
380 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
382 bool
383 get_object_alignment_1 (tree exp, unsigned int *alignp,
384 unsigned HOST_WIDE_INT *bitposp)
386 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
387 with it. */
388 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
389 exp = TREE_OPERAND (exp, 0);
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
393 /* Return the alignment in bits of EXP, an object. */
395 unsigned int
396 get_object_alignment (tree exp)
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
401 get_object_alignment_1 (exp, &align, &bitpos);
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
406 if (bitpos != 0)
407 align = least_bit_hwi (bitpos);
408 return align;
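/* Worked example (illustrative, not part of the original source): if
   get_object_alignment_1 reports ALIGN == 64 bits and BITPOS == 16 bits,
   the object's address is known to satisfy  addr % 8 == 2  in bytes.
   get_object_alignment then returns least_bit_hwi (16) == 16 bits,
   i.e. the object is guaranteed to be 2-byte aligned but no more.  */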
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in alignp and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
416 If EXP is not a pointer, false is returned too. */
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
422 STRIP_NOPS (exp);
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
466 else
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
473 else if (TREE_CODE (exp) == INTEGER_CST)
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
493 unsigned int
494 get_pointer_alignment (tree exp)
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
499 get_pointer_alignment_1 (exp, &align, &bitpos);
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
504 if (bitpos != 0)
505 align = least_bit_hwi (bitpos);
507 return align;
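/* Worked example (illustrative, not part of the original source): for a
   POINTER_PLUS_EXPR p + 6 where p is known to be 16-byte aligned,
   get_pointer_alignment_1 computes ALIGN == 128 bits and BITPOS == 48
   bits, so get_pointer_alignment returns least_bit_hwi (48) == 16 bits:
   the sum is only guaranteed to be 2-byte aligned.  */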
510 /* Return the number of leading non-zero elements in the sequence
511 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
512 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
514 unsigned
515 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
517 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
519 unsigned n;
521 if (eltsize == 1)
523 /* Optimize the common case of plain char. */
524 for (n = 0; n < maxelts; n++)
526 const char *elt = (const char*) ptr + n;
527 if (!*elt)
528 break;
531 else
533 for (n = 0; n < maxelts; n++)
535 const char *elt = (const char*) ptr + n * eltsize;
536 if (!memcmp (elt, "\0\0\0\0", eltsize))
537 break;
540 return n;
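/* Illustrative examples (not part of the original source):
     string_length ("ab\0cd", 1, 5)  returns 2
     string_length ("a\0",    1, 1)  returns 1  (MAXELTS reached first)
   and for a 2-byte wide string whose bytes are 'a', 0, 0, 0,
     string_length (ptr, 2, 2)  returns 1.  */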
543 /* Compute the length of a null-terminated character string or wide
544 character string handling character sizes of 1, 2, and 4 bytes.
545 TREE_STRING_LENGTH is not the right way because it evaluates to
546 the size of the character array in bytes (as opposed to characters)
547 and because it can contain a zero byte in the middle.
549 ONLY_VALUE should be nonzero if the result is not going to be emitted
550 into the instruction stream and zero if it is going to be expanded.
551 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
552 is returned, otherwise NULL, since
553 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
554 evaluate the side-effects.
556 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
557 accesses. Note that this implies the result is not going to be emitted
558 into the instruction stream.
560 Additional information about the string accessed may be recorded
561 in DATA. For example, if ARG references an unterminated string,
562 then the declaration will be stored in the DECL field. If the
563 length of the unterminated string can be determined, it'll be
564 stored in the LEN field. Note this length could well be different
565 than what a C strlen call would return.
567 ELTSIZE is 1 for normal single byte character strings, and 2 or
568 4 for wide character strings. ELTSIZE is by default 1.
570 The value returned is of type `ssizetype'. */
572 tree
573 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
575 /* If we were not passed a DATA pointer, then get one to a local
576 structure. That avoids having to check DATA for NULL before
577 each time we want to use it. */
578 c_strlen_data local_strlen_data = { };
579 if (!data)
580 data = &local_strlen_data;
582 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
584 tree src = STRIP_NOPS (arg);
585 if (TREE_CODE (src) == COND_EXPR
586 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
588 tree len1, len2;
590 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
591 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
592 if (tree_int_cst_equal (len1, len2))
593 return len1;
596 if (TREE_CODE (src) == COMPOUND_EXPR
597 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
598 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
600 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
602 /* Offset from the beginning of the string in bytes. */
603 tree byteoff;
604 tree memsize;
605 tree decl;
606 src = string_constant (src, &byteoff, &memsize, &decl);
607 if (src == 0)
608 return NULL_TREE;
610 /* Determine the size of the string element. */
611 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
612 return NULL_TREE;
614 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
615 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
616 in case the latter is less than the size of the array, such as when
617 SRC refers to a short string literal used to initialize a large array.
618 In that case, the elements of the array after the terminating NUL are
619 all NUL. */
620 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
621 strelts = strelts / eltsize;
623 if (!tree_fits_uhwi_p (memsize))
624 return NULL_TREE;
626 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
628 /* PTR can point to the byte representation of any string type, including
629 char* and wchar_t*. */
630 const char *ptr = TREE_STRING_POINTER (src);
632 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
634 /* The code below works only for single byte character types. */
635 if (eltsize != 1)
636 return NULL_TREE;
638 /* If the string has an internal NUL character followed by any
639 non-NUL characters (e.g., "foo\0bar"), we can't compute
640 the offset to the following NUL if we don't know where to
641 start searching for it. */
642 unsigned len = string_length (ptr, eltsize, strelts);
644 /* Return when an embedded null character is found or none at all.
645 In the latter case, set the DECL/LEN field in the DATA structure
646 so that callers may examine them. */
647 if (len + 1 < strelts)
648 return NULL_TREE;
649 else if (len >= maxelts)
651 data->decl = decl;
652 data->off = byteoff;
653 data->minlen = ssize_int (len);
654 return NULL_TREE;
657 /* For empty strings the result should be zero. */
658 if (len == 0)
659 return ssize_int (0);
661 /* We don't know the starting offset, but we do know that the string
662 has no internal zero bytes. If the offset falls within the bounds
663 of the string subtract the offset from the length of the string,
664 and return that. Otherwise the length is zero. Take care to
665 use SAVE_EXPR in case the OFFSET has side-effects. */
666 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
667 : byteoff;
668 offsave = fold_convert_loc (loc, sizetype, offsave);
669 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
670 size_int (len));
671 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
672 offsave);
673 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
674 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
675 build_zero_cst (ssizetype));
678 /* Offset from the beginning of the string in elements. */
679 HOST_WIDE_INT eltoff;
681 /* We have a known offset into the string. Start searching there for
682 a null character if we can represent it as a single HOST_WIDE_INT. */
683 if (byteoff == 0)
684 eltoff = 0;
685 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
686 eltoff = -1;
687 else
688 eltoff = tree_to_uhwi (byteoff) / eltsize;
690 /* If the offset is known to be out of bounds, warn, and call strlen at
691 runtime. */
692 if (eltoff < 0 || eltoff >= maxelts)
694 /* Suppress multiple warnings for propagated constant strings. */
695 if (only_value != 2
696 && !warning_suppressed_p (arg, OPT_Warray_bounds)
697 && warning_at (loc, OPT_Warray_bounds,
698 "offset %qwi outside bounds of constant string",
699 eltoff))
701 if (decl)
702 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
703 suppress_warning (arg, OPT_Warray_bounds);
705 return NULL_TREE;
708 /* If eltoff is larger than strelts but less than maxelts the
709 string length is zero, since the excess memory will be zero. */
710 if (eltoff > strelts)
711 return ssize_int (0);
713 /* Use strlen to search for the first zero byte. Since any strings
714 constructed with build_string will have nulls appended, we win even
715 if we get handed something like (char[4])"abcd".
717 Since ELTOFF is our starting index into the string, no further
718 calculation is needed. */
719 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
720 strelts - eltoff);
722 /* Don't know what to return if there was no zero termination.
723 Ideally this would turn into a gcc_checking_assert over time.
724 Set DECL/LEN so callers can examine them. */
725 if (len >= maxelts - eltoff)
727 data->decl = decl;
728 data->off = byteoff;
729 data->minlen = ssize_int (len);
730 return NULL_TREE;
733 return ssize_int (len);
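/* Illustrative examples (not part of the original source), assuming ARG
   folds to a STRING_CST via string_constant:
     c_strlen of "hello"          yields ssize_int (5)
     c_strlen of &"foo\0bar"[4]   yields ssize_int (3)
   With a variable offset into "foo\0bar", the embedded NUL makes the
   result depend on where the search would start, so NULL_TREE is
   returned (the len + 1 < strelts case above).  */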
736 /* Return a constant integer corresponding to target reading
737 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
738 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
739 are assumed to be zero, otherwise it reads as many characters
740 as needed. */
742 rtx
743 c_readstr (const char *str, scalar_int_mode mode,
744 bool null_terminated_p/*=true*/)
746 HOST_WIDE_INT ch;
747 unsigned int i, j;
748 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
750 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
751 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
752 / HOST_BITS_PER_WIDE_INT;
754 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
755 for (i = 0; i < len; i++)
756 tmp[i] = 0;
758 ch = 1;
759 for (i = 0; i < GET_MODE_SIZE (mode); i++)
761 j = i;
762 if (WORDS_BIG_ENDIAN)
763 j = GET_MODE_SIZE (mode) - i - 1;
764 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
765 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
766 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
767 j *= BITS_PER_UNIT;
769 if (ch || !null_terminated_p)
770 ch = (unsigned char) str[i];
771 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
774 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
775 return immed_wide_int_const (c, mode);
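/* Worked example (illustrative, not part of the original source),
   assuming 8-bit bytes and a 32-bit SImode: on a little-endian target
   c_readstr ("ab", SImode) reads the bytes 'a', 'b', '\0', '\0'
   (bytes past the terminating NUL are taken as zero) and yields the
   constant 0x00006261; on a big-endian target the same call yields
   0x61620000.  */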
778 /* Cast a target constant CST to target CHAR and if that value fits into
779 host char type, return zero and put that value into variable pointed to by
780 P. */
782 static int
783 target_char_cast (tree cst, char *p)
785 unsigned HOST_WIDE_INT val, hostval;
787 if (TREE_CODE (cst) != INTEGER_CST
788 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
789 return 1;
791 /* Do not care if it fits or not right here. */
792 val = TREE_INT_CST_LOW (cst);
794 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
795 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
797 hostval = val;
798 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
799 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
801 if (val != hostval)
802 return 1;
804 *p = hostval;
805 return 0;
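/* Illustrative example (not part of the original source): for the
   INTEGER_CST 'x' (0x78), target_char_cast stores 0x78 through P and
   returns 0.  It returns 1 when CST is not an INTEGER_CST, when the
   target char is wider than HOST_WIDE_INT, or when the value fits the
   target char but not the host char (e.g. 0x178 with a 16-bit target
   char and an 8-bit host char).  */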
808 /* Similar to save_expr, but assumes that arbitrary code is not executed
809 in between the multiple evaluations. In particular, we assume that a
810 non-addressable local variable will not be modified. */
812 static tree
813 builtin_save_expr (tree exp)
815 if (TREE_CODE (exp) == SSA_NAME
816 || (TREE_ADDRESSABLE (exp) == 0
817 && (TREE_CODE (exp) == PARM_DECL
818 || (VAR_P (exp) && !TREE_STATIC (exp)))))
819 return exp;
821 return save_expr (exp);
824 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
825 times to get the address of either a higher stack frame, or a return
826 address located within it (depending on FNDECL_CODE). */
828 static rtx
829 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
831 int i;
832 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
833 if (tem == NULL_RTX)
835 /* For a zero count with __builtin_return_address, we don't care what
836 frame address we return, because target-specific definitions will
837 override us. Therefore frame pointer elimination is OK, and using
838 the soft frame pointer is OK.
840 For a nonzero count, or a zero count with __builtin_frame_address,
841 we require a stable offset from the current frame pointer to the
842 previous one, so we must use the hard frame pointer, and
843 we must disable frame pointer elimination. */
844 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
845 tem = frame_pointer_rtx;
846 else
848 tem = hard_frame_pointer_rtx;
850 /* Tell reload not to eliminate the frame pointer. */
851 crtl->accesses_prior_frames = 1;
855 if (count > 0)
856 SETUP_FRAME_ADDRESSES ();
858 /* On the SPARC, the return address is not in the frame, it is in a
859 register. There is no way to access it off of the current frame
860 pointer, but it can be accessed off the previous frame pointer by
861 reading the value from the register window save area. */
862 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
863 count--;
865 /* Scan back COUNT frames to the specified frame. */
866 for (i = 0; i < count; i++)
868 /* Assume the dynamic chain pointer is in the word that the
869 frame address points to, unless otherwise specified. */
870 tem = DYNAMIC_CHAIN_ADDRESS (tem);
871 tem = memory_address (Pmode, tem);
872 tem = gen_frame_mem (Pmode, tem);
873 tem = copy_to_reg (tem);
876 /* For __builtin_frame_address, return what we've got. But, on
877 the SPARC for example, we may have to add a bias. */
878 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
879 return FRAME_ADDR_RTX (tem);
881 /* For __builtin_return_address, get the return address from that frame. */
882 #ifdef RETURN_ADDR_RTX
883 tem = RETURN_ADDR_RTX (count, tem);
884 #else
885 tem = memory_address (Pmode,
886 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
887 tem = gen_frame_mem (Pmode, tem);
888 #endif
889 return tem;
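/* Source-level usage (illustrative, not part of the original source):
     void *ra = __builtin_return_address (0);   current return address
     void *fp = __builtin_frame_address (1);    caller's frame address
   A nonzero COUNT, or __builtin_frame_address with any COUNT, forces
   use of the hard frame pointer and disables frame pointer elimination,
   as explained in the comment above.  */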
892 /* Alias set used for setjmp buffer. */
893 static alias_set_type setjmp_alias_set = -1;
895 /* Construct the leading half of a __builtin_setjmp call. Control will
896 return to RECEIVER_LABEL. This is also called directly by the SJLJ
897 exception handling code. */
899 void
900 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
902 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
903 rtx stack_save;
904 rtx mem;
906 if (setjmp_alias_set == -1)
907 setjmp_alias_set = new_alias_set ();
909 buf_addr = convert_memory_address (Pmode, buf_addr);
911 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
913 /* We store the frame pointer and the address of receiver_label in
914 the buffer and use the rest of it for the stack save area, which
915 is machine-dependent. */
917 mem = gen_rtx_MEM (Pmode, buf_addr);
918 set_mem_alias_set (mem, setjmp_alias_set);
919 emit_move_insn (mem, hard_frame_pointer_rtx);
921 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
922 GET_MODE_SIZE (Pmode))),
923 set_mem_alias_set (mem, setjmp_alias_set);
925 emit_move_insn (validize_mem (mem),
926 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
928 stack_save = gen_rtx_MEM (sa_mode,
929 plus_constant (Pmode, buf_addr,
930 2 * GET_MODE_SIZE (Pmode)));
931 set_mem_alias_set (stack_save, setjmp_alias_set);
932 emit_stack_save (SAVE_NONLOCAL, &stack_save);
934 /* If there is further processing to do, do it. */
935 if (targetm.have_builtin_setjmp_setup ())
936 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
938 /* We have a nonlocal label. */
939 cfun->has_nonlocal_label = 1;
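/* Buffer layout implied by the stores above (descriptive comment, not
   part of the original source): word 0 of the buffer holds the hard
   frame pointer, word 1 holds the address of RECEIVER_LABEL, and the
   area starting at offset 2 * GET_MODE_SIZE (Pmode) holds the
   machine-dependent stack save area filled in by emit_stack_save.  */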
942 /* Construct the trailing part of a __builtin_setjmp call. This is
943 also called directly by the SJLJ exception handling code.
944 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
946 void
947 expand_builtin_setjmp_receiver (rtx receiver_label)
949 rtx chain;
951 /* Mark the FP as used when we get here, so we have to make sure it's
952 marked as used by this function. */
953 emit_use (hard_frame_pointer_rtx);
955 /* Mark the static chain as clobbered here so life information
956 doesn't get messed up for it. */
957 chain = rtx_for_static_chain (current_function_decl, true);
958 if (chain && REG_P (chain))
959 emit_clobber (chain);
961 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
963 /* If the argument pointer can be eliminated in favor of the
964 frame pointer, we don't need to restore it. We assume here
965 that if such an elimination is present, it can always be used.
966 This is the case on all known machines; if we don't make this
967 assumption, we do unnecessary saving on many machines. */
968 size_t i;
969 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
971 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
972 if (elim_regs[i].from == ARG_POINTER_REGNUM
973 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
974 break;
976 if (i == ARRAY_SIZE (elim_regs))
978 /* Now restore our arg pointer from the address at which it
979 was saved in our stack frame. */
980 emit_move_insn (crtl->args.internal_arg_pointer,
981 copy_to_reg (get_arg_pointer_save_area ()));
985 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
986 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
987 else if (targetm.have_nonlocal_goto_receiver ())
988 emit_insn (targetm.gen_nonlocal_goto_receiver ());
989 else
990 { /* Nothing */ }
992 /* We must not allow the code we just generated to be reordered by
993 scheduling. Specifically, the update of the frame pointer must
994 happen immediately, not later. */
995 emit_insn (gen_blockage ());
998 /* __builtin_longjmp is passed a pointer to an array of five words (not
999 all will be used on all machines). It operates similarly to the C
1000 library function of the same name, but is more efficient. Much of
1001 the code below is copied from the handling of non-local gotos. */
1003 static void
1004 expand_builtin_longjmp (rtx buf_addr, rtx value)
1006 rtx fp, lab, stack;
1007 rtx_insn *insn, *last;
1008 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1010 /* DRAP is needed for stack realign if longjmp is expanded to current
1011 function */
1012 if (SUPPORTS_STACK_ALIGNMENT)
1013 crtl->need_drap = true;
1015 if (setjmp_alias_set == -1)
1016 setjmp_alias_set = new_alias_set ();
1018 buf_addr = convert_memory_address (Pmode, buf_addr);
1020 buf_addr = force_reg (Pmode, buf_addr);
1022 /* We require that the user must pass a second argument of 1, because
1023 that is what builtin_setjmp will return. */
1024 gcc_assert (value == const1_rtx);
1026 last = get_last_insn ();
1027 if (targetm.have_builtin_longjmp ())
1028 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1029 else
1031 fp = gen_rtx_MEM (Pmode, buf_addr);
1032 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1033 GET_MODE_SIZE (Pmode)));
1035 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1036 2 * GET_MODE_SIZE (Pmode)));
1037 set_mem_alias_set (fp, setjmp_alias_set);
1038 set_mem_alias_set (lab, setjmp_alias_set);
1039 set_mem_alias_set (stack, setjmp_alias_set);
1041 /* Pick up FP, label, and SP from the block and jump. This code is
1042 from expand_goto in stmt.c; see there for detailed comments. */
1043 if (targetm.have_nonlocal_goto ())
1044 /* We have to pass a value to the nonlocal_goto pattern that will
1045 get copied into the static_chain pointer, but it does not matter
1046 what that value is, because builtin_setjmp does not use it. */
1047 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1048 else
1050 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1051 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1053 lab = copy_to_reg (lab);
1055 /* Restore the frame pointer and stack pointer. We must use a
1056 temporary since the setjmp buffer may be a local. */
1057 fp = copy_to_reg (fp);
1058 emit_stack_restore (SAVE_NONLOCAL, stack);
1060 /* Ensure the frame pointer move is not optimized. */
1061 emit_insn (gen_blockage ());
1062 emit_clobber (hard_frame_pointer_rtx);
1063 emit_clobber (frame_pointer_rtx);
1064 emit_move_insn (hard_frame_pointer_rtx, fp);
1066 emit_use (hard_frame_pointer_rtx);
1067 emit_use (stack_pointer_rtx);
1068 emit_indirect_jump (lab);
1072 /* Search backwards and mark the jump insn as a non-local goto.
1073 Note that this precludes the use of __builtin_longjmp to a
1074 __builtin_setjmp target in the same function. However, we've
1075 already cautioned the user that these functions are for
1076 internal exception handling use only. */
1077 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1079 gcc_assert (insn != last);
1081 if (JUMP_P (insn))
1083 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1084 break;
1086 else if (CALL_P (insn))
1087 break;
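/* Source-level usage (illustrative, not part of the original source;
   do_work and handle_unwind are placeholders):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();        called on the direct return
     else
       handle_unwind ();  reached via __builtin_longjmp (buf, 1)

   The second argument to __builtin_longjmp must be the constant 1,
   matching the gcc_assert above; as noted, these builtins are intended
   for internal exception-handling use, not as general replacements for
   setjmp/longjmp.  */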
1091 static inline bool
1092 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1094 return (iter->i < iter->n);
1097 /* This function validates the types of a function call argument list
1098 against a specified list of tree_codes. If the last specifier is a 0,
1099 that represents an ellipsis, otherwise the last specifier must be a
1100 VOID_TYPE. */
1102 static bool
1103 validate_arglist (const_tree callexpr, ...)
1105 enum tree_code code;
1106 bool res = 0;
1107 va_list ap;
1108 const_call_expr_arg_iterator iter;
1109 const_tree arg;
1111 va_start (ap, callexpr);
1112 init_const_call_expr_arg_iterator (callexpr, &iter);
1114 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1115 tree fn = CALL_EXPR_FN (callexpr);
1116 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1118 for (unsigned argno = 1; ; ++argno)
1120 code = (enum tree_code) va_arg (ap, int);
1122 switch (code)
1124 case 0:
1125 /* This signifies an ellipsis; any further arguments are all ok. */
1126 res = true;
1127 goto end;
1128 case VOID_TYPE:
1129 /* This signifies an endlink, if no arguments remain, return
1130 true, otherwise return false. */
1131 res = !more_const_call_expr_args_p (&iter);
1132 goto end;
1133 case POINTER_TYPE:
1134 /* The actual argument must be nonnull when either the whole
1135 called function has been declared nonnull, or when the formal
1136 argument corresponding to the actual argument has been. */
1137 if (argmap
1138 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1140 arg = next_const_call_expr_arg (&iter);
1141 if (!validate_arg (arg, code) || integer_zerop (arg))
1142 goto end;
1143 break;
1145 /* FALLTHRU */
1146 default:
1147 /* If no parameters remain or the parameter's code does not
1148 match the specified code, return false. Otherwise continue
1149 checking any remaining arguments. */
1150 arg = next_const_call_expr_arg (&iter);
1151 if (!validate_arg (arg, code))
1152 goto end;
1153 break;
1157 /* We need gotos here since we can only have one VA_CLOSE in a
1158 function. */
1159 end: ;
1160 va_end (ap);
1162 BITMAP_FREE (argmap);
1164 return res;
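/* Illustrative calls (not part of the original source); both forms are
   used by the expanders later in this file:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       accepts exactly two pointer arguments;
     validate_arglist (exp, POINTER_TYPE, 0)
       accepts one pointer followed by any further arguments.  */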
1167 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1168 and the address of the save area. */
1170 static rtx
1171 expand_builtin_nonlocal_goto (tree exp)
1173 tree t_label, t_save_area;
1174 rtx r_label, r_save_area, r_fp, r_sp;
1175 rtx_insn *insn;
1177 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1178 return NULL_RTX;
1180 t_label = CALL_EXPR_ARG (exp, 0);
1181 t_save_area = CALL_EXPR_ARG (exp, 1);
1183 r_label = expand_normal (t_label);
1184 r_label = convert_memory_address (Pmode, r_label);
1185 r_save_area = expand_normal (t_save_area);
1186 r_save_area = convert_memory_address (Pmode, r_save_area);
1187 /* Copy the address of the save location to a register just in case it was
1188 based on the frame pointer. */
1189 r_save_area = copy_to_reg (r_save_area);
1190 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1191 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1192 plus_constant (Pmode, r_save_area,
1193 GET_MODE_SIZE (Pmode)));
1195 crtl->has_nonlocal_goto = 1;
1197 /* ??? We no longer need to pass the static chain value, afaik. */
1198 if (targetm.have_nonlocal_goto ())
1199 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1200 else
1202 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1203 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1205 r_label = copy_to_reg (r_label);
1207 /* Restore the frame pointer and stack pointer. We must use a
1208 temporary since the setjmp buffer may be a local. */
1209 r_fp = copy_to_reg (r_fp);
1210 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1212 /* Ensure the frame pointer move is not optimized. */
1213 emit_insn (gen_blockage ());
1214 emit_clobber (hard_frame_pointer_rtx);
1215 emit_clobber (frame_pointer_rtx);
1216 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1218 /* USE of hard_frame_pointer_rtx added for consistency;
1219 not clear if really needed. */
1220 emit_use (hard_frame_pointer_rtx);
1221 emit_use (stack_pointer_rtx);
1223 /* If the architecture is using a GP register, we must
1224 conservatively assume that the target function makes use of it.
1225 The prologue of functions with nonlocal gotos must therefore
1226 initialize the GP register to the appropriate value, and we
1227 must then make sure that this value is live at the point
1228 of the jump. (Note that this doesn't necessarily apply
1229 to targets with a nonlocal_goto pattern; they are free
1230 to implement it in their own way. Note also that this is
1231 a no-op if the GP register is a global invariant.) */
1232 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1233 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1234 emit_use (pic_offset_table_rtx);
1236 emit_indirect_jump (r_label);
1239 /* Search backwards to the jump insn and mark it as a
1240 non-local goto. */
1241 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1243 if (JUMP_P (insn))
1245 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1246 break;
1248 else if (CALL_P (insn))
1249 break;
1252 return const0_rtx;
1255 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1256 (not all will be used on all machines) that was passed to __builtin_setjmp.
1257 It updates the stack pointer in that block to the current value. This is
1258 also called directly by the SJLJ exception handling code. */
1260 void
1261 expand_builtin_update_setjmp_buf (rtx buf_addr)
1263 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1264 buf_addr = convert_memory_address (Pmode, buf_addr);
1265 rtx stack_save
1266 = gen_rtx_MEM (sa_mode,
1267 memory_address
1268 (sa_mode,
1269 plus_constant (Pmode, buf_addr,
1270 2 * GET_MODE_SIZE (Pmode))));
1272 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1275 /* Expand a call to __builtin_prefetch. For a target that does not support
1276 data prefetch, evaluate the memory address argument in case it has side
1277 effects. */
1279 static void
1280 expand_builtin_prefetch (tree exp)
1282 tree arg0, arg1, arg2;
1283 int nargs;
1284 rtx op0, op1, op2;
1286 if (!validate_arglist (exp, POINTER_TYPE, 0))
1287 return;
1289 arg0 = CALL_EXPR_ARG (exp, 0);
1291 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1292 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1293 locality). */
1294 nargs = call_expr_nargs (exp);
1295 if (nargs > 1)
1296 arg1 = CALL_EXPR_ARG (exp, 1);
1297 else
1298 arg1 = integer_zero_node;
1299 if (nargs > 2)
1300 arg2 = CALL_EXPR_ARG (exp, 2);
1301 else
1302 arg2 = integer_three_node;
1304 /* Argument 0 is an address. */
1305 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1307 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1308 if (TREE_CODE (arg1) != INTEGER_CST)
1310 error ("second argument to %<__builtin_prefetch%> must be a constant");
1311 arg1 = integer_zero_node;
1313 op1 = expand_normal (arg1);
1314 /* Argument 1 must be either zero or one. */
1315 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1317 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1318 " using zero");
1319 op1 = const0_rtx;
1322 /* Argument 2 (locality) must be a compile-time constant int. */
1323 if (TREE_CODE (arg2) != INTEGER_CST)
1325 error ("third argument to %<__builtin_prefetch%> must be a constant");
1326 arg2 = integer_zero_node;
1328 op2 = expand_normal (arg2);
1329 /* Argument 2 must be 0, 1, 2, or 3. */
1330 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1332 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1333 op2 = const0_rtx;
1336 if (targetm.have_prefetch ())
1338 class expand_operand ops[3];
1340 create_address_operand (&ops[0], op0);
1341 create_integer_operand (&ops[1], INTVAL (op1));
1342 create_integer_operand (&ops[2], INTVAL (op2));
1343 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1344 return;
1347 /* Don't do anything with direct references to volatile memory, but
1348 generate code to handle other side effects. */
1349 if (!MEM_P (op0) && side_effects_p (op0))
1350 emit_insn (op0);
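/* Source-level usage (illustrative, not part of the original source):
     __builtin_prefetch (&a[i + 8]);         read prefetch, locality 3
     __builtin_prefetch (&a[i + 8], 1, 1);   write prefetch, low locality
   The second and third arguments must be integer constants, as enforced
   above.  */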
1353 /* Get a MEM rtx for expression EXP which is the address of an operand
1354 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1355 the maximum length of the block of memory that might be accessed or
1356 NULL if unknown. */
1358 static rtx
1359 get_memory_rtx (tree exp, tree len)
1361 tree orig_exp = exp;
1362 rtx addr, mem;
1364 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1365 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1366 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1367 exp = TREE_OPERAND (exp, 0);
1369 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1370 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1372 /* Get an expression we can use to find the attributes to assign to MEM.
1373 First remove any nops. */
1374 while (CONVERT_EXPR_P (exp)
1375 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1376 exp = TREE_OPERAND (exp, 0);
1378 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1379 (as builtin stringops may alias with anything). */
1380 exp = fold_build2 (MEM_REF,
1381 build_array_type (char_type_node,
1382 build_range_type (sizetype,
1383 size_one_node, len)),
1384 exp, build_int_cst (ptr_type_node, 0));
1386 /* If the MEM_REF has no acceptable address, try to get the base object
1387 from the original address we got, and build an all-aliasing
1388 unknown-sized access to that one. */
1389 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1390 set_mem_attributes (mem, exp, 0);
1391 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1392 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1393 0))))
1395 exp = build_fold_addr_expr (exp);
1396 exp = fold_build2 (MEM_REF,
1397 build_array_type (char_type_node,
1398 build_range_type (sizetype,
1399 size_zero_node,
1400 NULL)),
1401 exp, build_int_cst (ptr_type_node, 0));
1402 set_mem_attributes (mem, exp, 0);
1404 set_mem_alias_set (mem, 0);
1405 return mem;
1408 /* Built-in functions to perform an untyped call and return. */
1410 #define apply_args_mode \
1411 (this_target_builtins->x_apply_args_mode)
1412 #define apply_result_mode \
1413 (this_target_builtins->x_apply_result_mode)
1415 /* Return the size required for the block returned by __builtin_apply_args,
1416 and initialize apply_args_mode. */
1418 static int
1419 apply_args_size (void)
1421 static int size = -1;
1422 int align;
1423 unsigned int regno;
1425 /* The values computed by this function never change. */
1426 if (size < 0)
1428 /* The first value is the incoming arg-pointer. */
1429 size = GET_MODE_SIZE (Pmode);
1431 /* The second value is the structure value address unless this is
1432 passed as an "invisible" first argument. */
1433 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1434 size += GET_MODE_SIZE (Pmode);
1436 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1437 if (FUNCTION_ARG_REGNO_P (regno))
1439 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1441 gcc_assert (mode != VOIDmode);
1443 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1444 if (size % align != 0)
1445 size = CEIL (size, align) * align;
1446 size += GET_MODE_SIZE (mode);
1447 apply_args_mode[regno] = mode;
1449 else
1451 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1454 return size;
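/* Block layout implied by the code above (descriptive comment, not part
   of the original source): the block begins with the incoming arg
   pointer (one Pmode word), optionally followed by the structure value
   address (another Pmode word), and then every register that may carry
   arguments, each stored in its raw argument mode at an offset rounded
   up to that mode's alignment.  */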
1457 /* Return the size required for the block returned by __builtin_apply,
1458 and initialize apply_result_mode. */
1460 static int
1461 apply_result_size (void)
1463 static int size = -1;
1464 int align, regno;
1466 /* The values computed by this function never change. */
1467 if (size < 0)
1469 size = 0;
1471 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1472 if (targetm.calls.function_value_regno_p (regno))
1474 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1476 gcc_assert (mode != VOIDmode);
1478 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1479 if (size % align != 0)
1480 size = CEIL (size, align) * align;
1481 size += GET_MODE_SIZE (mode);
1482 apply_result_mode[regno] = mode;
1484 else
1485 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1487 /* Allow targets that use untyped_call and untyped_return to override
1488 the size so that machine-specific information can be stored here. */
1489 #ifdef APPLY_RESULT_SIZE
1490 size = APPLY_RESULT_SIZE;
1491 #endif
1493 return size;
1496 /* Create a vector describing the result block RESULT. If SAVEP is true,
1497 the result block is used to save the values; otherwise it is used to
1498 restore the values. */
1500 static rtx
1501 result_vector (int savep, rtx result)
1503 int regno, size, align, nelts;
1504 fixed_size_mode mode;
1505 rtx reg, mem;
1506 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1508 size = nelts = 0;
1509 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1510 if ((mode = apply_result_mode[regno]) != VOIDmode)
1512 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1513 if (size % align != 0)
1514 size = CEIL (size, align) * align;
1515 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1516 mem = adjust_address (result, mode, size);
1517 savevec[nelts++] = (savep
1518 ? gen_rtx_SET (mem, reg)
1519 : gen_rtx_SET (reg, mem));
1520 size += GET_MODE_SIZE (mode);
1522 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1525 /* Save the state required to perform an untyped call with the same
1526 arguments as were passed to the current function. */
1528 static rtx
1529 expand_builtin_apply_args_1 (void)
1531 rtx registers, tem;
1532 int size, align, regno;
1533 fixed_size_mode mode;
1534 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1536 /* Create a block where the arg-pointer, structure value address,
1537 and argument registers can be saved. */
1538 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1540 /* Walk past the arg-pointer and structure value address. */
1541 size = GET_MODE_SIZE (Pmode);
1542 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1543 size += GET_MODE_SIZE (Pmode);
1545 /* Save each register used in calling a function to the block. */
1546 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1547 if ((mode = apply_args_mode[regno]) != VOIDmode)
1549 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1550 if (size % align != 0)
1551 size = CEIL (size, align) * align;
1553 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1555 emit_move_insn (adjust_address (registers, mode, size), tem);
1556 size += GET_MODE_SIZE (mode);
1559 /* Save the arg pointer to the block. */
1560 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1561 /* We need the pointer as the caller actually passed them to us, not
1562 as we might have pretended they were passed. Make sure it's a valid
1563 operand, as emit_move_insn isn't expected to handle a PLUS. */
1564 if (STACK_GROWS_DOWNWARD)
1565 tem
1566 = force_operand (plus_constant (Pmode, tem,
1567 crtl->args.pretend_args_size),
1568 NULL_RTX);
1569 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1571 size = GET_MODE_SIZE (Pmode);
1573 /* Save the structure value address unless this is passed as an
1574 "invisible" first argument. */
1575 if (struct_incoming_value)
1576 emit_move_insn (adjust_address (registers, Pmode, size),
1577 copy_to_reg (struct_incoming_value));
1579 /* Return the address of the block. */
1580 return copy_addr_to_reg (XEXP (registers, 0));
1583 /* __builtin_apply_args returns block of memory allocated on
1584 the stack into which is stored the arg pointer, structure
1585 value address, static chain, and all the registers that might
1586 possibly be used in performing a function call. The code is
1587 moved to the start of the function so the incoming values are
1588 saved. */
1590 static rtx
1591 expand_builtin_apply_args (void)
1593 /* Don't do __builtin_apply_args more than once in a function.
1594 Save the result of the first call and reuse it. */
1595 if (apply_args_value != 0)
1596 return apply_args_value;
1598 /* When this function is called, it means that registers must be
1599 saved on entry to this function. So we migrate the
1600 call to the first insn of this function. */
1601 rtx temp;
1603 start_sequence ();
1604 temp = expand_builtin_apply_args_1 ();
1605 rtx_insn *seq = get_insns ();
1606 end_sequence ();
1608 apply_args_value = temp;
1610 /* Put the insns after the NOTE that starts the function.
1611 If this is inside a start_sequence, make the outer-level insn
1612 chain current, so the code is placed at the start of the
1613 function. If internal_arg_pointer is a non-virtual pseudo,
1614 it needs to be placed after the function that initializes
1615 that pseudo. */
1616 push_topmost_sequence ();
1617 if (REG_P (crtl->args.internal_arg_pointer)
1618 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1619 emit_insn_before (seq, parm_birth_insn);
1620 else
1621 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1622 pop_topmost_sequence ();
1623 return temp;
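/* Source-level usage of the untyped call builtins (illustrative, not
   part of the original source; target_fn and SIZE are placeholders):
     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, SIZE);
     __builtin_return (result);
   SIZE is the number of bytes of stack arguments to copy to the
   forwarded call.  */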
1627 /* Perform an untyped call and save the state required to perform an
1628 untyped return of whatever value was returned by the given function. */
1630 static rtx
1631 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1633 int size, align, regno;
1634 fixed_size_mode mode;
1635 rtx incoming_args, result, reg, dest, src;
1636 rtx_call_insn *call_insn;
1637 rtx old_stack_level = 0;
1638 rtx call_fusage = 0;
1639 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1641 arguments = convert_memory_address (Pmode, arguments);
1643 /* Create a block where the return registers can be saved. */
1644 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1646 /* Fetch the arg pointer from the ARGUMENTS block. */
1647 incoming_args = gen_reg_rtx (Pmode);
1648 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1649 if (!STACK_GROWS_DOWNWARD)
1650 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1651 incoming_args, 0, OPTAB_LIB_WIDEN);
1653 /* Push a new argument block and copy the arguments. Do not allow
1654 the (potential) memcpy call below to interfere with our stack
1655 manipulations. */
1656 do_pending_stack_adjust ();
1657 NO_DEFER_POP;
1659 /* Save the stack with nonlocal if available. */
1660 if (targetm.have_save_stack_nonlocal ())
1661 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1662 else
1663 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1665 /* Allocate a block of memory onto the stack and copy the memory
1666 arguments to the outgoing arguments address. We can pass TRUE
1667 as the 4th argument because we just saved the stack pointer
1668 and will restore it right after the call. */
1669 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1671 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1672 may have already set current_function_calls_alloca to true.
1673 current_function_calls_alloca won't be set if argsize is zero,
1674 so we have to guarantee need_drap is true here. */
1675 if (SUPPORTS_STACK_ALIGNMENT)
1676 crtl->need_drap = true;
1678 dest = virtual_outgoing_args_rtx;
1679 if (!STACK_GROWS_DOWNWARD)
1681 if (CONST_INT_P (argsize))
1682 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1683 else
1684 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1686 dest = gen_rtx_MEM (BLKmode, dest);
1687 set_mem_align (dest, PARM_BOUNDARY);
1688 src = gen_rtx_MEM (BLKmode, incoming_args);
1689 set_mem_align (src, PARM_BOUNDARY);
1690 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1692 /* Refer to the argument block. */
1693 apply_args_size ();
1694 arguments = gen_rtx_MEM (BLKmode, arguments);
1695 set_mem_align (arguments, PARM_BOUNDARY);
1697 /* Walk past the arg-pointer and structure value address. */
1698 size = GET_MODE_SIZE (Pmode);
1699 if (struct_value)
1700 size += GET_MODE_SIZE (Pmode);
1702 /* Restore each of the registers previously saved. Make USE insns
1703 for each of these registers for use in making the call. */
1704 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1705 if ((mode = apply_args_mode[regno]) != VOIDmode)
1707 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1708 if (size % align != 0)
1709 size = CEIL (size, align) * align;
1710 reg = gen_rtx_REG (mode, regno);
1711 emit_move_insn (reg, adjust_address (arguments, mode, size));
1712 use_reg (&call_fusage, reg);
1713 size += GET_MODE_SIZE (mode);
1716 /* Restore the structure value address unless this is passed as an
1717 "invisible" first argument. */
1718 size = GET_MODE_SIZE (Pmode);
1719 if (struct_value)
1721 rtx value = gen_reg_rtx (Pmode);
1722 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1723 emit_move_insn (struct_value, value);
1724 if (REG_P (struct_value))
1725 use_reg (&call_fusage, struct_value);
1728 /* All arguments and registers used for the call are set up by now! */
1729 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1731 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1732 and we don't want to load it into a register as an optimization,
1733 because prepare_call_address already did it if it should be done. */
1734 if (GET_CODE (function) != SYMBOL_REF)
1735 function = memory_address (FUNCTION_MODE, function);
1737 /* Generate the actual call instruction and save the return value. */
1738 if (targetm.have_untyped_call ())
1740 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1741 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1742 result_vector (1, result));
1743 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1744 if (CALL_P (insn))
1745 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1746 emit_insn (seq);
1748 else if (targetm.have_call_value ())
1750 rtx valreg = 0;
1752 /* Locate the unique return register. It is not possible to
1753 express a call that sets more than one return register using
1754 call_value; use untyped_call for that. In fact, untyped_call
1755 only needs to save the return registers in the given block. */
1756 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1757 if ((mode = apply_result_mode[regno]) != VOIDmode)
1759 gcc_assert (!valreg); /* have_untyped_call required. */
1761 valreg = gen_rtx_REG (mode, regno);
1764 emit_insn (targetm.gen_call_value (valreg,
1765 gen_rtx_MEM (FUNCTION_MODE, function),
1766 const0_rtx, NULL_RTX, const0_rtx));
1768 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1770 else
1771 gcc_unreachable ();
1773 /* Find the CALL insn we just emitted, and attach the register usage
1774 information. */
1775 call_insn = last_call_insn ();
1776 add_function_usage_to (call_insn, call_fusage);
1778 /* Restore the stack. */
1779 if (targetm.have_save_stack_nonlocal ())
1780 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1781 else
1782 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1783 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1785 OK_DEFER_POP;
1787 /* Return the address of the result block. */
1788 result = copy_addr_to_reg (XEXP (result, 0));
1789 return convert_memory_address (ptr_mode, result);
1792 /* Perform an untyped return. */
1794 static void
1795 expand_builtin_return (rtx result)
1797 int size, align, regno;
1798 fixed_size_mode mode;
1799 rtx reg;
1800 rtx_insn *call_fusage = 0;
1802 result = convert_memory_address (Pmode, result);
1804 apply_result_size ();
1805 result = gen_rtx_MEM (BLKmode, result);
1807 if (targetm.have_untyped_return ())
1809 rtx vector = result_vector (0, result);
1810 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1811 emit_barrier ();
1812 return;
1815 /* Restore the return value and note that each value is used. */
1816 size = 0;
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 if ((mode = apply_result_mode[regno]) != VOIDmode)
1820 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1821 if (size % align != 0)
1822 size = CEIL (size, align) * align;
1823 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1824 emit_move_insn (reg, adjust_address (result, mode, size));
1826 push_to_sequence (call_fusage);
1827 emit_use (reg);
1828 call_fusage = get_insns ();
1829 end_sequence ();
1830 size += GET_MODE_SIZE (mode);
1833 /* Put the USE insns before the return. */
1834 emit_insn (call_fusage);
1836 /* Return whatever values were restored by jumping directly to the end
1837 of the function. */
1838 expand_naked_return ();
1841 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1843 static enum type_class
1844 type_to_class (tree type)
1846 switch (TREE_CODE (type))
1848 case VOID_TYPE: return void_type_class;
1849 case INTEGER_TYPE: return integer_type_class;
1850 case ENUMERAL_TYPE: return enumeral_type_class;
1851 case BOOLEAN_TYPE: return boolean_type_class;
1852 case POINTER_TYPE: return pointer_type_class;
1853 case REFERENCE_TYPE: return reference_type_class;
1854 case OFFSET_TYPE: return offset_type_class;
1855 case REAL_TYPE: return real_type_class;
1856 case COMPLEX_TYPE: return complex_type_class;
1857 case FUNCTION_TYPE: return function_type_class;
1858 case METHOD_TYPE: return method_type_class;
1859 case RECORD_TYPE: return record_type_class;
1860 case UNION_TYPE:
1861 case QUAL_UNION_TYPE: return union_type_class;
1862 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1863 ? string_type_class : array_type_class);
1864 case LANG_TYPE: return lang_type_class;
1865 case OPAQUE_TYPE: return opaque_type_class;
1866 default: return no_type_class;
1870 /* Expand a call EXP to __builtin_classify_type. */
1872 static rtx
1873 expand_builtin_classify_type (tree exp)
1875 if (call_expr_nargs (exp))
1876 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1877 return GEN_INT (no_type_class);
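/* For example (a hedged illustration, not code from this file):

     __builtin_classify_type (1.5)    evaluates to real_type_class,
     __builtin_classify_type ("abc")  evaluates to pointer_type_class,
     since an array argument decays to a pointer in the call.

   Library headers (e.g. type-generic math macros) use the builtin this
   way to dispatch on the class of an argument.  */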
1880 /* This helper macro, meant to be used in mathfn_built_in below, determines
1881 which among a set of builtin math functions is appropriate for a given type
1882 mode. The `F' (float) and `L' (long double) variants are automatically generated
1883 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1884 types, there are additional types that are considered with 'F32', 'F64',
1885 'F128', etc. suffixes. */
1886 #define CASE_MATHFN(MATHFN) \
1887 CASE_CFN_##MATHFN: \
1888 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1889 fcodel = BUILT_IN_##MATHFN##L ; break;
1890 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1891 types. */
1892 #define CASE_MATHFN_FLOATN(MATHFN) \
1893 CASE_CFN_##MATHFN: \
1894 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1895 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1896 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1897 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1898 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1899 break;
1900 /* Similar to above, but appends _R after any F/L suffix. */
1901 #define CASE_MATHFN_REENT(MATHFN) \
1902 case CFN_BUILT_IN_##MATHFN##_R: \
1903 case CFN_BUILT_IN_##MATHFN##F_R: \
1904 case CFN_BUILT_IN_##MATHFN##L_R: \
1905 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1906 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1908 /* Return a function equivalent to FN but operating on floating-point
1909 values of type TYPE, or END_BUILTINS if no such function exists.
1910 This is purely an operation on function codes; it does not guarantee
1911 that the target actually has an implementation of the function. */
1913 static built_in_function
1914 mathfn_built_in_2 (tree type, combined_fn fn)
1916 tree mtype;
1917 built_in_function fcode, fcodef, fcodel;
1918 built_in_function fcodef16 = END_BUILTINS;
1919 built_in_function fcodef32 = END_BUILTINS;
1920 built_in_function fcodef64 = END_BUILTINS;
1921 built_in_function fcodef128 = END_BUILTINS;
1922 built_in_function fcodef32x = END_BUILTINS;
1923 built_in_function fcodef64x = END_BUILTINS;
1924 built_in_function fcodef128x = END_BUILTINS;
1926 switch (fn)
1928 #define SEQ_OF_CASE_MATHFN \
1929 CASE_MATHFN (ACOS) \
1930 CASE_MATHFN (ACOSH) \
1931 CASE_MATHFN (ASIN) \
1932 CASE_MATHFN (ASINH) \
1933 CASE_MATHFN (ATAN) \
1934 CASE_MATHFN (ATAN2) \
1935 CASE_MATHFN (ATANH) \
1936 CASE_MATHFN (CBRT) \
1937 CASE_MATHFN_FLOATN (CEIL) \
1938 CASE_MATHFN (CEXPI) \
1939 CASE_MATHFN_FLOATN (COPYSIGN) \
1940 CASE_MATHFN (COS) \
1941 CASE_MATHFN (COSH) \
1942 CASE_MATHFN (DREM) \
1943 CASE_MATHFN (ERF) \
1944 CASE_MATHFN (ERFC) \
1945 CASE_MATHFN (EXP) \
1946 CASE_MATHFN (EXP10) \
1947 CASE_MATHFN (EXP2) \
1948 CASE_MATHFN (EXPM1) \
1949 CASE_MATHFN (FABS) \
1950 CASE_MATHFN (FDIM) \
1951 CASE_MATHFN_FLOATN (FLOOR) \
1952 CASE_MATHFN_FLOATN (FMA) \
1953 CASE_MATHFN_FLOATN (FMAX) \
1954 CASE_MATHFN_FLOATN (FMIN) \
1955 CASE_MATHFN (FMOD) \
1956 CASE_MATHFN (FREXP) \
1957 CASE_MATHFN (GAMMA) \
1958 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1959 CASE_MATHFN (HUGE_VAL) \
1960 CASE_MATHFN (HYPOT) \
1961 CASE_MATHFN (ILOGB) \
1962 CASE_MATHFN (ICEIL) \
1963 CASE_MATHFN (IFLOOR) \
1964 CASE_MATHFN (INF) \
1965 CASE_MATHFN (IRINT) \
1966 CASE_MATHFN (IROUND) \
1967 CASE_MATHFN (ISINF) \
1968 CASE_MATHFN (J0) \
1969 CASE_MATHFN (J1) \
1970 CASE_MATHFN (JN) \
1971 CASE_MATHFN (LCEIL) \
1972 CASE_MATHFN (LDEXP) \
1973 CASE_MATHFN (LFLOOR) \
1974 CASE_MATHFN (LGAMMA) \
1975 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1976 CASE_MATHFN (LLCEIL) \
1977 CASE_MATHFN (LLFLOOR) \
1978 CASE_MATHFN (LLRINT) \
1979 CASE_MATHFN (LLROUND) \
1980 CASE_MATHFN (LOG) \
1981 CASE_MATHFN (LOG10) \
1982 CASE_MATHFN (LOG1P) \
1983 CASE_MATHFN (LOG2) \
1984 CASE_MATHFN (LOGB) \
1985 CASE_MATHFN (LRINT) \
1986 CASE_MATHFN (LROUND) \
1987 CASE_MATHFN (MODF) \
1988 CASE_MATHFN (NAN) \
1989 CASE_MATHFN (NANS) \
1990 CASE_MATHFN_FLOATN (NEARBYINT) \
1991 CASE_MATHFN (NEXTAFTER) \
1992 CASE_MATHFN (NEXTTOWARD) \
1993 CASE_MATHFN (POW) \
1994 CASE_MATHFN (POWI) \
1995 CASE_MATHFN (POW10) \
1996 CASE_MATHFN (REMAINDER) \
1997 CASE_MATHFN (REMQUO) \
1998 CASE_MATHFN_FLOATN (RINT) \
1999 CASE_MATHFN_FLOATN (ROUND) \
2000 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2001 CASE_MATHFN (SCALB) \
2002 CASE_MATHFN (SCALBLN) \
2003 CASE_MATHFN (SCALBN) \
2004 CASE_MATHFN (SIGNBIT) \
2005 CASE_MATHFN (SIGNIFICAND) \
2006 CASE_MATHFN (SIN) \
2007 CASE_MATHFN (SINCOS) \
2008 CASE_MATHFN (SINH) \
2009 CASE_MATHFN_FLOATN (SQRT) \
2010 CASE_MATHFN (TAN) \
2011 CASE_MATHFN (TANH) \
2012 CASE_MATHFN (TGAMMA) \
2013 CASE_MATHFN_FLOATN (TRUNC) \
2014 CASE_MATHFN (Y0) \
2015 CASE_MATHFN (Y1) \
2016 CASE_MATHFN (YN)
2018 SEQ_OF_CASE_MATHFN
2020 default:
2021 return END_BUILTINS;
2024 mtype = TYPE_MAIN_VARIANT (type);
2025 if (mtype == double_type_node)
2026 return fcode;
2027 else if (mtype == float_type_node)
2028 return fcodef;
2029 else if (mtype == long_double_type_node)
2030 return fcodel;
2031 else if (mtype == float16_type_node)
2032 return fcodef16;
2033 else if (mtype == float32_type_node)
2034 return fcodef32;
2035 else if (mtype == float64_type_node)
2036 return fcodef64;
2037 else if (mtype == float128_type_node)
2038 return fcodef128;
2039 else if (mtype == float32x_type_node)
2040 return fcodef32x;
2041 else if (mtype == float64x_type_node)
2042 return fcodef64x;
2043 else if (mtype == float128x_type_node)
2044 return fcodef128x;
2045 else
2046 return END_BUILTINS;
2049 #undef CASE_MATHFN
2050 #undef CASE_MATHFN_FLOATN
2051 #undef CASE_MATHFN_REENT
2053 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2054 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2055 otherwise use the explicit declaration. If we can't do the conversion,
2056 return null. */
2058 static tree
2059 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2061 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2062 if (fcode2 == END_BUILTINS)
2063 return NULL_TREE;
2065 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2066 return NULL_TREE;
2068 return builtin_decl_explicit (fcode2);
2071 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2073 tree
2074 mathfn_built_in (tree type, combined_fn fn)
2076 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2079 /* Like mathfn_built_in_1, but take a built_in_function and
2080 always use the implicit builtin declarations. */
2082 tree
2083 mathfn_built_in (tree type, enum built_in_function fn)
2085 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
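/* A typical use is to fetch the decl for the type-appropriate variant of a
   math function, e.g. (hedged sketch, not code from this file):

     tree sqrtfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
     if (sqrtfn)
       ... build a call to sqrtfn, which is sqrt, sqrtf or sqrtl
           depending on the type of ARG ...
*/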
2088 /* Return the type associated with a built-in function, i.e., the one
2089 to be passed to mathfn_built_in to get the type-specific
2090 function. */
2092 tree
2093 mathfn_built_in_type (combined_fn fn)
2095 #define CASE_MATHFN(MATHFN) \
2096 case CFN_BUILT_IN_##MATHFN: \
2097 return double_type_node; \
2098 case CFN_BUILT_IN_##MATHFN##F: \
2099 return float_type_node; \
2100 case CFN_BUILT_IN_##MATHFN##L: \
2101 return long_double_type_node;
2103 #define CASE_MATHFN_FLOATN(MATHFN) \
2104 CASE_MATHFN(MATHFN) \
2105 case CFN_BUILT_IN_##MATHFN##F16: \
2106 return float16_type_node; \
2107 case CFN_BUILT_IN_##MATHFN##F32: \
2108 return float32_type_node; \
2109 case CFN_BUILT_IN_##MATHFN##F64: \
2110 return float64_type_node; \
2111 case CFN_BUILT_IN_##MATHFN##F128: \
2112 return float128_type_node; \
2113 case CFN_BUILT_IN_##MATHFN##F32X: \
2114 return float32x_type_node; \
2115 case CFN_BUILT_IN_##MATHFN##F64X: \
2116 return float64x_type_node; \
2117 case CFN_BUILT_IN_##MATHFN##F128X: \
2118 return float128x_type_node;
2120 /* Similar to above, but appends _R after any F/L suffix. */
2121 #define CASE_MATHFN_REENT(MATHFN) \
2122 case CFN_BUILT_IN_##MATHFN##_R: \
2123 return double_type_node; \
2124 case CFN_BUILT_IN_##MATHFN##F_R: \
2125 return float_type_node; \
2126 case CFN_BUILT_IN_##MATHFN##L_R: \
2127 return long_double_type_node;
2129 switch (fn)
2131 SEQ_OF_CASE_MATHFN
2133 default:
2134 return NULL_TREE;
2137 #undef CASE_MATHFN
2138 #undef CASE_MATHFN_FLOATN
2139 #undef CASE_MATHFN_REENT
2140 #undef SEQ_OF_CASE_MATHFN
2143 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2144 return its code, otherwise return IFN_LAST. Note that this function
2145 only tests whether the function is defined in internal-fn.def, not whether
2146 it is actually available on the target. */
2148 internal_fn
2149 associated_internal_fn (tree fndecl)
2151 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2152 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2153 switch (DECL_FUNCTION_CODE (fndecl))
2155 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2156 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2157 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2158 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2159 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2160 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2161 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2162 #include "internal-fn.def"
2164 CASE_FLT_FN (BUILT_IN_POW10):
2165 return IFN_EXP10;
2167 CASE_FLT_FN (BUILT_IN_DREM):
2168 return IFN_REMAINDER;
2170 CASE_FLT_FN (BUILT_IN_SCALBN):
2171 CASE_FLT_FN (BUILT_IN_SCALBLN):
2172 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2173 return IFN_LDEXP;
2174 return IFN_LAST;
2176 default:
2177 return IFN_LAST;
2181 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2182 on the current target by a call to an internal function, return the
2183 code of that internal function, otherwise return IFN_LAST. The caller
2184 is responsible for ensuring that any side-effects of the built-in
2185 call are dealt with correctly. E.g. if CALL sets errno, the caller
2186 must decide that the errno result isn't needed or make it available
2187 in some other way. */
2189 internal_fn
2190 replacement_internal_fn (gcall *call)
2192 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2194 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2195 if (ifn != IFN_LAST)
2197 tree_pair types = direct_internal_fn_types (ifn, call);
2198 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2199 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2200 return ifn;
2203 return IFN_LAST;
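/* A caller in a GIMPLE pass might use this as follows (hedged sketch; the
   rewrite itself is only indicated, because as noted above the caller is
   responsible for any errno or other side effects):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       ... replace CALL with a call to the internal function IFN ...
*/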
2206 /* Expand a call to the builtin ternary math functions (fma).
2207 Return NULL_RTX if a normal call should be emitted rather than expanding the
2208 function in-line. EXP is the expression that is a call to the builtin
2209 function; if convenient, the result should be placed in TARGET.
2210 SUBTARGET may be used as the target for computing one of EXP's
2211 operands. */
2213 static rtx
2214 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2216 optab builtin_optab;
2217 rtx op0, op1, op2, result;
2218 rtx_insn *insns;
2219 tree fndecl = get_callee_fndecl (exp);
2220 tree arg0, arg1, arg2;
2221 machine_mode mode;
2223 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2224 return NULL_RTX;
2226 arg0 = CALL_EXPR_ARG (exp, 0);
2227 arg1 = CALL_EXPR_ARG (exp, 1);
2228 arg2 = CALL_EXPR_ARG (exp, 2);
2230 switch (DECL_FUNCTION_CODE (fndecl))
2232 CASE_FLT_FN (BUILT_IN_FMA):
2233 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2234 builtin_optab = fma_optab; break;
2235 default:
2236 gcc_unreachable ();
2239 /* Make a suitable register to place result in. */
2240 mode = TYPE_MODE (TREE_TYPE (exp));
2242 /* Before working hard, check whether the instruction is available. */
2243 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2244 return NULL_RTX;
2246 result = gen_reg_rtx (mode);
2248 /* Always stabilize the argument list. */
2249 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2250 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2251 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2253 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2254 op1 = expand_normal (arg1);
2255 op2 = expand_normal (arg2);
2257 start_sequence ();
2259 /* Compute into RESULT.
2260 Set RESULT to wherever the result comes back. */
2261 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2262 result, 0);
2264 /* If we were unable to expand via the builtin, stop the sequence
2265 (without outputting the insns) and call the library function
2266 with the stabilized argument list. */
2267 if (result == 0)
2269 end_sequence ();
2270 return expand_call (exp, target, target == const0_rtx);
2273 /* Output the entire sequence. */
2274 insns = get_insns ();
2275 end_sequence ();
2276 emit_insn (insns);
2278 return result;
2281 /* Expand a call to the builtin sin and cos math functions.
2282 Return NULL_RTX if a normal call should be emitted rather than expanding the
2283 function in-line. EXP is the expression that is a call to the builtin
2284 function; if convenient, the result should be placed in TARGET.
2285 SUBTARGET may be used as the target for computing one of EXP's
2286 operands. */
2288 static rtx
2289 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2291 optab builtin_optab;
2292 rtx op0;
2293 rtx_insn *insns;
2294 tree fndecl = get_callee_fndecl (exp);
2295 machine_mode mode;
2296 tree arg;
2298 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2299 return NULL_RTX;
2301 arg = CALL_EXPR_ARG (exp, 0);
2303 switch (DECL_FUNCTION_CODE (fndecl))
2305 CASE_FLT_FN (BUILT_IN_SIN):
2306 CASE_FLT_FN (BUILT_IN_COS):
2307 builtin_optab = sincos_optab; break;
2308 default:
2309 gcc_unreachable ();
2312 /* Make a suitable register to place result in. */
2313 mode = TYPE_MODE (TREE_TYPE (exp));
2315 /* Check if the sincos insn is available, otherwise fall back
2316 to sin or cos insn. */
2317 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2318 switch (DECL_FUNCTION_CODE (fndecl))
2320 CASE_FLT_FN (BUILT_IN_SIN):
2321 builtin_optab = sin_optab; break;
2322 CASE_FLT_FN (BUILT_IN_COS):
2323 builtin_optab = cos_optab; break;
2324 default:
2325 gcc_unreachable ();
2328 /* Before working hard, check whether the instruction is available. */
2329 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2331 rtx result = gen_reg_rtx (mode);
2333 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2334 need to expand the argument again. This way, we will not perform
2335 side-effects more than once. */
2336 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2338 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2340 start_sequence ();
2342 /* Compute into RESULT.
2343 Set RESULT to wherever the result comes back. */
2344 if (builtin_optab == sincos_optab)
2346 int ok;
2348 switch (DECL_FUNCTION_CODE (fndecl))
2350 CASE_FLT_FN (BUILT_IN_SIN):
2351 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2352 break;
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2355 break;
2356 default:
2357 gcc_unreachable ();
2359 gcc_assert (ok);
2361 else
2362 result = expand_unop (mode, builtin_optab, op0, result, 0);
2364 if (result != 0)
2366 /* Output the entire sequence. */
2367 insns = get_insns ();
2368 end_sequence ();
2369 emit_insn (insns);
2370 return result;
2373 /* If we were unable to expand via the builtin, stop the sequence
2374 (without outputting the insns) and call the library function
2375 with the stabilized argument list. */
2376 end_sequence ();
2379 return expand_call (exp, target, target == const0_rtx);
2382 /* Given an interclass math builtin decl FNDECL and its argument ARG
2383 return an RTL instruction code that implements the functionality.
2384 If that isn't possible or available return CODE_FOR_nothing. */
2386 static enum insn_code
2387 interclass_mathfn_icode (tree arg, tree fndecl)
2389 bool errno_set = false;
2390 optab builtin_optab = unknown_optab;
2391 machine_mode mode;
2393 switch (DECL_FUNCTION_CODE (fndecl))
2395 CASE_FLT_FN (BUILT_IN_ILOGB):
2396 errno_set = true; builtin_optab = ilogb_optab; break;
2397 CASE_FLT_FN (BUILT_IN_ISINF):
2398 builtin_optab = isinf_optab; break;
2399 case BUILT_IN_ISNORMAL:
2400 case BUILT_IN_ISFINITE:
2401 CASE_FLT_FN (BUILT_IN_FINITE):
2402 case BUILT_IN_FINITED32:
2403 case BUILT_IN_FINITED64:
2404 case BUILT_IN_FINITED128:
2405 case BUILT_IN_ISINFD32:
2406 case BUILT_IN_ISINFD64:
2407 case BUILT_IN_ISINFD128:
2408 /* These builtins have no optabs (yet). */
2409 break;
2410 default:
2411 gcc_unreachable ();
2414 /* There's no easy way to detect the case we need to set EDOM. */
2415 if (flag_errno_math && errno_set)
2416 return CODE_FOR_nothing;
2418 /* Optab mode depends on the mode of the input argument. */
2419 mode = TYPE_MODE (TREE_TYPE (arg));
2421 if (builtin_optab)
2422 return optab_handler (builtin_optab, mode);
2423 return CODE_FOR_nothing;
2426 /* Expand a call to one of the builtin math functions that operate on
2427 a floating-point argument and output an integer result (ilogb, isinf,
2428 isnan, etc).
2429 Return 0 if a normal call should be emitted rather than expanding the
2430 function in-line. EXP is the expression that is a call to the builtin
2431 function; if convenient, the result should be placed in TARGET. */
2433 static rtx
2434 expand_builtin_interclass_mathfn (tree exp, rtx target)
2436 enum insn_code icode = CODE_FOR_nothing;
2437 rtx op0;
2438 tree fndecl = get_callee_fndecl (exp);
2439 machine_mode mode;
2440 tree arg;
2442 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2443 return NULL_RTX;
2445 arg = CALL_EXPR_ARG (exp, 0);
2446 icode = interclass_mathfn_icode (arg, fndecl);
2447 mode = TYPE_MODE (TREE_TYPE (arg));
2449 if (icode != CODE_FOR_nothing)
2451 class expand_operand ops[1];
2452 rtx_insn *last = get_last_insn ();
2453 tree orig_arg = arg;
2455 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2456 need to expand the argument again. This way, we will not perform
2457 side-effects more than once. */
2458 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2460 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2462 if (mode != GET_MODE (op0))
2463 op0 = convert_to_mode (mode, op0, 0);
2465 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2466 if (maybe_legitimize_operands (icode, 0, 1, ops)
2467 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2468 return ops[0].value;
2470 delete_insns_since (last);
2471 CALL_EXPR_ARG (exp, 0) = orig_arg;
2474 return NULL_RTX;
2477 /* Expand a call to the builtin sincos math function.
2478 Return NULL_RTX if a normal call should be emitted rather than expanding the
2479 function in-line. EXP is the expression that is a call to the builtin
2480 function. */
2482 static rtx
2483 expand_builtin_sincos (tree exp)
2485 rtx op0, op1, op2, target1, target2;
2486 machine_mode mode;
2487 tree arg, sinp, cosp;
2488 int result;
2489 location_t loc = EXPR_LOCATION (exp);
2490 tree alias_type, alias_off;
2492 if (!validate_arglist (exp, REAL_TYPE,
2493 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2494 return NULL_RTX;
2496 arg = CALL_EXPR_ARG (exp, 0);
2497 sinp = CALL_EXPR_ARG (exp, 1);
2498 cosp = CALL_EXPR_ARG (exp, 2);
2500 /* Make a suitable register to place result in. */
2501 mode = TYPE_MODE (TREE_TYPE (arg));
2503 /* Check if sincos insn is available, otherwise emit the call. */
2504 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2505 return NULL_RTX;
2507 target1 = gen_reg_rtx (mode);
2508 target2 = gen_reg_rtx (mode);
2510 op0 = expand_normal (arg);
2511 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2512 alias_off = build_int_cst (alias_type, 0);
2513 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2514 sinp, alias_off));
2515 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2516 cosp, alias_off));
2518 /* Compute into target1 and target2.
2519 Set TARGET to wherever the result comes back. */
2520 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2521 gcc_assert (result);
2523 /* Move target1 and target2 to the memory locations indicated
2524 by op1 and op2. */
2525 emit_move_insn (op1, target1);
2526 emit_move_insn (op2, target2);
2528 return const0_rtx;
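/* The source-level form this expands is, e.g. (illustration only):

     double s, c;
     sincos (x, &s, &c);

   When the sincos optab is available, both results are computed by a
   single insn and then stored through the two pointer arguments.  */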
2531 /* Expand a call to the internal cexpi builtin to the sincos math function.
2532 EXP is the expression that is a call to the builtin function; if convenient,
2533 the result should be placed in TARGET. */
2535 static rtx
2536 expand_builtin_cexpi (tree exp, rtx target)
2538 tree fndecl = get_callee_fndecl (exp);
2539 tree arg, type;
2540 machine_mode mode;
2541 rtx op0, op1, op2;
2542 location_t loc = EXPR_LOCATION (exp);
2544 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2545 return NULL_RTX;
2547 arg = CALL_EXPR_ARG (exp, 0);
2548 type = TREE_TYPE (arg);
2549 mode = TYPE_MODE (TREE_TYPE (arg));
2551 /* Try expanding via a sincos optab, fall back to emitting a libcall
2552 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2553 is only generated from sincos, cexp or if we have either of them. */
2554 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2556 op1 = gen_reg_rtx (mode);
2557 op2 = gen_reg_rtx (mode);
2559 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2561 /* Compute into op1 and op2. */
2562 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2564 else if (targetm.libc_has_function (function_sincos, type))
2566 tree call, fn = NULL_TREE;
2567 tree top1, top2;
2568 rtx op1a, op2a;
2570 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2571 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2572 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2573 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2574 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2575 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2576 else
2577 gcc_unreachable ();
2579 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2580 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2581 op1a = copy_addr_to_reg (XEXP (op1, 0));
2582 op2a = copy_addr_to_reg (XEXP (op2, 0));
2583 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2584 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2586 /* Make sure not to fold the sincos call again. */
2587 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2588 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2589 call, 3, arg, top1, top2));
2591 else
2593 tree call, fn = NULL_TREE, narg;
2594 tree ctype = build_complex_type (type);
2596 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2597 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2598 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2599 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2600 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2601 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2602 else
2603 gcc_unreachable ();
2605 /* If we don't have a decl for cexp create one. This is the
2606 friendliest fallback if the user calls __builtin_cexpi
2607 without full target C99 function support. */
2608 if (fn == NULL_TREE)
2610 tree fntype;
2611 const char *name = NULL;
2613 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2614 name = "cexpf";
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2616 name = "cexp";
2617 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2618 name = "cexpl";
2620 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2621 fn = build_fn_decl (name, fntype);
2624 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2625 build_real (type, dconst0), arg);
2627 /* Make sure not to fold the cexp call again. */
2628 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2629 return expand_expr (build_call_nary (ctype, call, 1, narg),
2630 target, VOIDmode, EXPAND_NORMAL);
2633 /* Now build the proper return type. */
2634 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2635 make_tree (TREE_TYPE (arg), op2),
2636 make_tree (TREE_TYPE (arg), op1)),
2637 target, VOIDmode, EXPAND_NORMAL);
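/* Mathematically, cexpi (x) == cexp (I*x) == cos (x) + I*sin (x), which is
   why either a sincos-style expansion or a cexp libcall can implement it;
   the final COMPLEX_EXPR above pairs the cosine as the real part with the
   sine as the imaginary part.  */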
2640 /* Conveniently construct a function call expression. FNDECL names the
2641 function to be called, N is the number of arguments, and the "..."
2642 parameters are the argument expressions. Unlike build_call_expr
2643 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2645 static tree
2646 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2648 va_list ap;
2649 tree fntype = TREE_TYPE (fndecl);
2650 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2652 va_start (ap, n);
2653 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2654 va_end (ap);
2655 SET_EXPR_LOCATION (fn, loc);
2656 return fn;
2659 /* Expand a call to one of the builtin rounding functions gcc defines
2660 as an extension (lfloor and lceil). As these are gcc extensions we
2661 do not need to worry about setting errno to EDOM.
2662 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2663 EXP is the expression that is a call to the builtin function;
2664 if convenient, the result should be placed in TARGET. */
2666 static rtx
2667 expand_builtin_int_roundingfn (tree exp, rtx target)
2669 convert_optab builtin_optab;
2670 rtx op0, tmp;
2671 rtx_insn *insns;
2672 tree fndecl = get_callee_fndecl (exp);
2673 enum built_in_function fallback_fn;
2674 tree fallback_fndecl;
2675 machine_mode mode;
2676 tree arg;
2678 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2679 return NULL_RTX;
2681 arg = CALL_EXPR_ARG (exp, 0);
2683 switch (DECL_FUNCTION_CODE (fndecl))
2685 CASE_FLT_FN (BUILT_IN_ICEIL):
2686 CASE_FLT_FN (BUILT_IN_LCEIL):
2687 CASE_FLT_FN (BUILT_IN_LLCEIL):
2688 builtin_optab = lceil_optab;
2689 fallback_fn = BUILT_IN_CEIL;
2690 break;
2692 CASE_FLT_FN (BUILT_IN_IFLOOR):
2693 CASE_FLT_FN (BUILT_IN_LFLOOR):
2694 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2695 builtin_optab = lfloor_optab;
2696 fallback_fn = BUILT_IN_FLOOR;
2697 break;
2699 default:
2700 gcc_unreachable ();
2703 /* Make a suitable register to place result in. */
2704 mode = TYPE_MODE (TREE_TYPE (exp));
2706 target = gen_reg_rtx (mode);
2708 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2709 need to expand the argument again. This way, we will not perform
2710 side-effects more than once. */
2711 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2713 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2715 start_sequence ();
2717 /* Compute into TARGET. */
2718 if (expand_sfix_optab (target, op0, builtin_optab))
2720 /* Output the entire sequence. */
2721 insns = get_insns ();
2722 end_sequence ();
2723 emit_insn (insns);
2724 return target;
2727 /* If we were unable to expand via the builtin, stop the sequence
2728 (without outputting the insns). */
2729 end_sequence ();
2731 /* Fall back to floating point rounding optab. */
2732 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2734 /* For non-C99 targets we may end up without a fallback fndecl here
2735 if the user called __builtin_lfloor directly. In this case emit
2736 a call to the floor/ceil variants nevertheless. This should result
2737 in the best user experience for targets lacking full C99 support.
2738 if (fallback_fndecl == NULL_TREE)
2740 tree fntype;
2741 const char *name = NULL;
2743 switch (DECL_FUNCTION_CODE (fndecl))
2745 case BUILT_IN_ICEIL:
2746 case BUILT_IN_LCEIL:
2747 case BUILT_IN_LLCEIL:
2748 name = "ceil";
2749 break;
2750 case BUILT_IN_ICEILF:
2751 case BUILT_IN_LCEILF:
2752 case BUILT_IN_LLCEILF:
2753 name = "ceilf";
2754 break;
2755 case BUILT_IN_ICEILL:
2756 case BUILT_IN_LCEILL:
2757 case BUILT_IN_LLCEILL:
2758 name = "ceill";
2759 break;
2760 case BUILT_IN_IFLOOR:
2761 case BUILT_IN_LFLOOR:
2762 case BUILT_IN_LLFLOOR:
2763 name = "floor";
2764 break;
2765 case BUILT_IN_IFLOORF:
2766 case BUILT_IN_LFLOORF:
2767 case BUILT_IN_LLFLOORF:
2768 name = "floorf";
2769 break;
2770 case BUILT_IN_IFLOORL:
2771 case BUILT_IN_LFLOORL:
2772 case BUILT_IN_LLFLOORL:
2773 name = "floorl";
2774 break;
2775 default:
2776 gcc_unreachable ();
2779 fntype = build_function_type_list (TREE_TYPE (arg),
2780 TREE_TYPE (arg), NULL_TREE);
2781 fallback_fndecl = build_fn_decl (name, fntype);
2784 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2786 tmp = expand_normal (exp);
2787 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2789 /* Truncate the result of floating point optab to integer
2790 via expand_fix (). */
2791 target = gen_reg_rtx (mode);
2792 expand_fix (target, tmp, 0);
2794 return target;
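/* So on a target without an lceil/lfloor pattern, a call such as
   (illustration only):

     long l = __builtin_lfloor (x);

   is expanded along the fallback path above as roughly
   "l = (long) floor (x)".  */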
2797 /* Expand a call to one of the builtin math functions doing integer
2798 conversion (lrint).
2799 Return 0 if a normal call should be emitted rather than expanding the
2800 function in-line. EXP is the expression that is a call to the builtin
2801 function; if convenient, the result should be placed in TARGET. */
2803 static rtx
2804 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2806 convert_optab builtin_optab;
2807 rtx op0;
2808 rtx_insn *insns;
2809 tree fndecl = get_callee_fndecl (exp);
2810 tree arg;
2811 machine_mode mode;
2812 enum built_in_function fallback_fn = BUILT_IN_NONE;
2814 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2815 return NULL_RTX;
2817 arg = CALL_EXPR_ARG (exp, 0);
2819 switch (DECL_FUNCTION_CODE (fndecl))
2821 CASE_FLT_FN (BUILT_IN_IRINT):
2822 fallback_fn = BUILT_IN_LRINT;
2823 gcc_fallthrough ();
2824 CASE_FLT_FN (BUILT_IN_LRINT):
2825 CASE_FLT_FN (BUILT_IN_LLRINT):
2826 builtin_optab = lrint_optab;
2827 break;
2829 CASE_FLT_FN (BUILT_IN_IROUND):
2830 fallback_fn = BUILT_IN_LROUND;
2831 gcc_fallthrough ();
2832 CASE_FLT_FN (BUILT_IN_LROUND):
2833 CASE_FLT_FN (BUILT_IN_LLROUND):
2834 builtin_optab = lround_optab;
2835 break;
2837 default:
2838 gcc_unreachable ();
2841 /* There's no easy way to detect the case we need to set EDOM. */
2842 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2843 return NULL_RTX;
2845 /* Make a suitable register to place result in. */
2846 mode = TYPE_MODE (TREE_TYPE (exp));
2848 /* There's no easy way to detect the case we need to set EDOM. */
2849 if (!flag_errno_math)
2851 rtx result = gen_reg_rtx (mode);
2853 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2854 need to expand the argument again. This way, we will not perform
2855 side-effects more than once. */
2856 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2858 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2860 start_sequence ();
2862 if (expand_sfix_optab (result, op0, builtin_optab))
2864 /* Output the entire sequence. */
2865 insns = get_insns ();
2866 end_sequence ();
2867 emit_insn (insns);
2868 return result;
2871 /* If we were unable to expand via the builtin, stop the sequence
2872 (without outputting the insns) and call the library function
2873 with the stabilized argument list. */
2874 end_sequence ();
2877 if (fallback_fn != BUILT_IN_NONE)
2879 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2880 targets, (int) round (x) should never be transformed into
2881 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2882 a call to lround in the hope that the target provides at least some
2883 C99 functions. This should result in the best user experience for
2884 targets lacking full C99 support. */
2885 tree fallback_fndecl = mathfn_built_in_1
2886 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2888 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2889 fallback_fndecl, 1, arg);
2891 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2892 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2893 return convert_to_mode (mode, target, 0);
2896 return expand_call (exp, target, target == const0_rtx);
2899 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2900 a normal call should be emitted rather than expanding the function
2901 in-line. EXP is the expression that is a call to the builtin
2902 function; if convenient, the result should be placed in TARGET. */
2904 static rtx
2905 expand_builtin_powi (tree exp, rtx target)
2907 tree arg0, arg1;
2908 rtx op0, op1;
2909 machine_mode mode;
2910 machine_mode mode2;
2912 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2913 return NULL_RTX;
2915 arg0 = CALL_EXPR_ARG (exp, 0);
2916 arg1 = CALL_EXPR_ARG (exp, 1);
2917 mode = TYPE_MODE (TREE_TYPE (exp));
2919 /* Emit a libcall to libgcc. */
2921 /* Mode of the 2nd argument must match that of an int. */
2922 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2924 if (target == NULL_RTX)
2925 target = gen_reg_rtx (mode);
2927 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2928 if (GET_MODE (op0) != mode)
2929 op0 = convert_to_mode (mode, op0, 0);
2930 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2931 if (GET_MODE (op1) != mode2)
2932 op1 = convert_to_mode (mode2, op1, 0);
2934 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2935 target, LCT_CONST, mode,
2936 op0, mode, op1, mode2);
2938 return target;
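/* A call such as __builtin_powi (x, n) with X a double therefore becomes a
   libcall to the helper registered for powi_optab in DFmode (conventionally
   __powidf2 in libgcc; illustrative, the exact symbol depends on the mode
   and target).  */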
2941 /* Expand expression EXP which is a call to the strlen builtin. Return
2942 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2943 try to get the result in TARGET, if convenient. */
2945 static rtx
2946 expand_builtin_strlen (tree exp, rtx target,
2947 machine_mode target_mode)
2949 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2950 return NULL_RTX;
2952 tree src = CALL_EXPR_ARG (exp, 0);
2954 /* If the length can be computed at compile-time, return it. */
2955 if (tree len = c_strlen (src, 0))
2956 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2958 /* If the length can be computed at compile-time and is a constant
2959 integer, but there are side-effects in src, evaluate
2960 src for side-effects, then return len.
2961 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2962 can be optimized into: i++; x = 3; */
2963 tree len = c_strlen (src, 1);
2964 if (len && TREE_CODE (len) == INTEGER_CST)
2966 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2967 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2970 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
2972 /* If SRC is not a pointer type, don't do this operation inline. */
2973 if (align == 0)
2974 return NULL_RTX;
2976 /* Bail out if we can't compute strlen in the right mode. */
2977 machine_mode insn_mode;
2978 enum insn_code icode = CODE_FOR_nothing;
2979 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2981 icode = optab_handler (strlen_optab, insn_mode);
2982 if (icode != CODE_FOR_nothing)
2983 break;
2985 if (insn_mode == VOIDmode)
2986 return NULL_RTX;
2988 /* Make a place to hold the source address. We will not expand
2989 the actual source until we are sure that the expansion will
2990 not fail -- there are trees that cannot be expanded twice. */
2991 rtx src_reg = gen_reg_rtx (Pmode);
2993 /* Mark the beginning of the strlen sequence so we can emit the
2994 source operand later. */
2995 rtx_insn *before_strlen = get_last_insn ();
2997 class expand_operand ops[4];
2998 create_output_operand (&ops[0], target, insn_mode);
2999 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3000 create_integer_operand (&ops[2], 0);
3001 create_integer_operand (&ops[3], align);
3002 if (!maybe_expand_insn (icode, 4, ops))
3003 return NULL_RTX;
3005 /* Check to see if the argument was declared attribute nonstring
3006 and if so, issue a warning since at this point it's not known
3007 to be nul-terminated. */
3008 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3010 /* Now that we are assured of success, expand the source. */
3011 start_sequence ();
3012 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3013 if (pat != src_reg)
3015 #ifdef POINTERS_EXTEND_UNSIGNED
3016 if (GET_MODE (pat) != Pmode)
3017 pat = convert_to_mode (Pmode, pat,
3018 POINTERS_EXTEND_UNSIGNED);
3019 #endif
3020 emit_move_insn (src_reg, pat);
3022 pat = get_insns ();
3023 end_sequence ();
3025 if (before_strlen)
3026 emit_insn_after (pat, before_strlen);
3027 else
3028 emit_insn_before (pat, get_insns ());
3030 /* Return the value in the proper mode for this function. */
3031 if (GET_MODE (ops[0].value) == target_mode)
3032 target = ops[0].value;
3033 else if (target != 0)
3034 convert_move (target, ops[0].value, 0);
3035 else
3036 target = convert_to_mode (target_mode, ops[0].value, 0);
3038 return target;
3041 /* Expand call EXP to the strnlen built-in, returning the result
3042 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3044 static rtx
3045 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3047 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3048 return NULL_RTX;
3050 tree src = CALL_EXPR_ARG (exp, 0);
3051 tree bound = CALL_EXPR_ARG (exp, 1);
3053 if (!bound)
3054 return NULL_RTX;
3056 location_t loc = UNKNOWN_LOCATION;
3057 if (EXPR_HAS_LOCATION (exp))
3058 loc = EXPR_LOCATION (exp);
3060 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3061 so these conversions aren't necessary. */
3062 c_strlen_data lendata = { };
3063 tree len = c_strlen (src, 0, &lendata, 1);
3064 if (len)
3065 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3067 if (TREE_CODE (bound) == INTEGER_CST)
3069 if (!len)
3070 return NULL_RTX;
3072 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3073 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3076 if (TREE_CODE (bound) != SSA_NAME)
3077 return NULL_RTX;
3079 wide_int min, max;
3080 value_range r;
3081 get_global_range_query ()->range_of_expr (r, bound);
3082 if (r.kind () != VR_RANGE)
3083 return NULL_RTX;
3084 min = r.lower_bound ();
3085 max = r.upper_bound ();
3087 if (!len || TREE_CODE (len) != INTEGER_CST)
3089 bool exact;
3090 lendata.decl = unterminated_array (src, &len, &exact);
3091 if (!lendata.decl)
3092 return NULL_RTX;
3095 if (lendata.decl)
3096 return NULL_RTX;
3098 if (wi::gtu_p (min, wi::to_wide (len)))
3099 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3101 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3102 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3105 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3106 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3107 a target constant. */
3109 static rtx
3110 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3111 fixed_size_mode mode)
3113 /* The REPresentation pointed to by DATA need not be a nul-terminated
3114 string but the caller guarantees it's large enough for MODE. */
3115 const char *rep = (const char *) data;
3117 /* The by-pieces infrastructure does not try to pick a vector mode
3118 for memcpy expansion. */
3119 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3120 /*nul_terminated=*/false);
3123 /* LEN specifies the length of the block for the memcpy/memset operation.
3124 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3125 In some cases we can make a very likely guess at the max size, in which
3126 case we set it in PROBABLE_MAX_SIZE.
3128 static void
3129 determine_block_size (tree len, rtx len_rtx,
3130 unsigned HOST_WIDE_INT *min_size,
3131 unsigned HOST_WIDE_INT *max_size,
3132 unsigned HOST_WIDE_INT *probable_max_size)
3134 if (CONST_INT_P (len_rtx))
3136 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3137 return;
3139 else
3141 wide_int min, max;
3142 enum value_range_kind range_type = VR_UNDEFINED;
3144 /* Determine bounds from the type. */
3145 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3146 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3147 else
3148 *min_size = 0;
3149 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3150 *probable_max_size = *max_size
3151 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3152 else
3153 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3155 if (TREE_CODE (len) == SSA_NAME)
3157 value_range r;
3158 get_global_range_query ()->range_of_expr (r, len);
3159 range_type = r.kind ();
3160 if (range_type != VR_UNDEFINED)
3162 min = wi::to_wide (r.min ());
3163 max = wi::to_wide (r.max ());
3166 if (range_type == VR_RANGE)
3168 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3169 *min_size = min.to_uhwi ();
3170 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3171 *probable_max_size = *max_size = max.to_uhwi ();
3173 else if (range_type == VR_ANTI_RANGE)
3175 /* Code like
3177 int n;
3178 if (n < 100)
3179 memcpy (a, b, n)
3181 produces an anti-range allowing negative values of N. We still
3182 can use the information and make a guess that N is not negative.
3184 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3185 *probable_max_size = min.to_uhwi () - 1;
3188 gcc_checking_assert (*max_size <=
3189 (unsigned HOST_WIDE_INT)
3190 GET_MODE_MASK (GET_MODE (len_rtx)));
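/* For instance, given (illustration only):

     void f (unsigned int n, char *d, const char *s)
     {
       if (n < 100)
         memcpy (d, s, n);
     }

   and assuming the range of N at the call has been recorded (e.g. by VRP),
   the bounds come out as MIN_SIZE 0 and MAX_SIZE == PROBABLE_MAX_SIZE 99.  */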
3193 /* Expand a call EXP to the memcpy builtin.
3194 Return NULL_RTX if we failed; the caller should emit a normal call,
3195 otherwise try to get the result in TARGET, if convenient (and in
3196 mode MODE if that's convenient). */
3198 static rtx
3199 expand_builtin_memcpy (tree exp, rtx target)
3201 if (!validate_arglist (exp,
3202 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3203 return NULL_RTX;
3205 tree dest = CALL_EXPR_ARG (exp, 0);
3206 tree src = CALL_EXPR_ARG (exp, 1);
3207 tree len = CALL_EXPR_ARG (exp, 2);
3209 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3210 /*retmode=*/ RETURN_BEGIN, false);
3213 /* Check a call EXP to the memmove built-in for validity.
3214 Return NULL_RTX on both success and failure. */
3216 static rtx
3217 expand_builtin_memmove (tree exp, rtx target)
3219 if (!validate_arglist (exp,
3220 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3221 return NULL_RTX;
3223 tree dest = CALL_EXPR_ARG (exp, 0);
3224 tree src = CALL_EXPR_ARG (exp, 1);
3225 tree len = CALL_EXPR_ARG (exp, 2);
3227 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3228 /*retmode=*/ RETURN_BEGIN, true);
3231 /* Expand a call EXP to the mempcpy builtin.
3232 Return NULL_RTX if we failed; the caller should emit a normal call,
3233 otherwise try to get the result in TARGET, if convenient (and in
3234 mode MODE if that's convenient). */
3236 static rtx
3237 expand_builtin_mempcpy (tree exp, rtx target)
3239 if (!validate_arglist (exp,
3240 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3241 return NULL_RTX;
3243 tree dest = CALL_EXPR_ARG (exp, 0);
3244 tree src = CALL_EXPR_ARG (exp, 1);
3245 tree len = CALL_EXPR_ARG (exp, 2);
3247 /* Policy does not generally allow using compute_objsize (which
3248 is used internally by check_memop_size) to change code generation
3249 or drive optimization decisions.
3251 In this instance it is safe because the code we generate has
3252 the same semantics regardless of the return value of
3253 check_memop_size. Exactly the same amount of data is copied
3254 and the return value is exactly the same in both cases.
3256 Furthermore, check_memop_size always uses mode 0 for the call to
3257 compute_objsize, so the imprecise nature of compute_objsize is
3258 avoided. */
3260 /* Avoid expanding mempcpy into memcpy when the call is determined
3261 to overflow the buffer. This also prevents the same overflow
3262 from being diagnosed again when expanding memcpy. */
3264 return expand_builtin_mempcpy_args (dest, src, len,
3265 target, exp, /*retmode=*/ RETURN_END);
3268 /* Helper function to do the actual work of expanding the memory copy family
3269 of functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3270 of memory from SRC to DEST and assign to TARGET if convenient. Return
3271 value is based on RETMODE argument. */
3273 static rtx
3274 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3275 rtx target, tree exp, memop_ret retmode,
3276 bool might_overlap)
3278 unsigned int src_align = get_pointer_alignment (src);
3279 unsigned int dest_align = get_pointer_alignment (dest);
3280 rtx dest_mem, src_mem, dest_addr, len_rtx;
3281 HOST_WIDE_INT expected_size = -1;
3282 unsigned int expected_align = 0;
3283 unsigned HOST_WIDE_INT min_size;
3284 unsigned HOST_WIDE_INT max_size;
3285 unsigned HOST_WIDE_INT probable_max_size;
3287 bool is_move_done;
3289 /* If DEST is not a pointer type, call the normal function. */
3290 if (dest_align == 0)
3291 return NULL_RTX;
3293 /* Likewise, if SRC is not a pointer type, don't do this
3294 operation in-line. */
3295 if (src_align == 0)
3296 return NULL_RTX;
3298 if (currently_expanding_gimple_stmt)
3299 stringop_block_profile (currently_expanding_gimple_stmt,
3300 &expected_align, &expected_size);
3302 if (expected_align < dest_align)
3303 expected_align = dest_align;
3304 dest_mem = get_memory_rtx (dest, len);
3305 set_mem_align (dest_mem, dest_align);
3306 len_rtx = expand_normal (len);
3307 determine_block_size (len, len_rtx, &min_size, &max_size,
3308 &probable_max_size);
3310 /* Try to get the byte representation of the constant SRC points to,
3311 with its byte size in NBYTES. */
3312 unsigned HOST_WIDE_INT nbytes;
3313 const char *rep = getbyterep (src, &nbytes);
3315 /* If the function's constant bound LEN_RTX is less than or equal
3316 to the byte size of the representation of the constant argument,
3317 and if block move would be done by pieces, we can avoid loading
3318 the bytes from memory and only store the computed constant.
3319 This works in the overlap (memmove) case as well because
3320 store_by_pieces just generates a series of stores of constants
3321 from the representation returned by getbyterep(). */
3322 if (rep
3323 && CONST_INT_P (len_rtx)
3324 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3325 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3326 CONST_CAST (char *, rep),
3327 dest_align, false))
3329 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3330 builtin_memcpy_read_str,
3331 CONST_CAST (char *, rep),
3332 dest_align, false, retmode);
3333 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3334 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3335 return dest_mem;
3338 src_mem = get_memory_rtx (src, len);
3339 set_mem_align (src_mem, src_align);
3341 /* Copy word part most expediently. */
3342 enum block_op_methods method = BLOCK_OP_NORMAL;
3343 if (CALL_EXPR_TAILCALL (exp)
3344 && (retmode == RETURN_BEGIN || target == const0_rtx))
3345 method = BLOCK_OP_TAILCALL;
3346 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3347 && retmode == RETURN_END
3348 && !might_overlap
3349 && target != const0_rtx);
3350 if (use_mempcpy_call)
3351 method = BLOCK_OP_NO_LIBCALL_RET;
3352 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3353 expected_align, expected_size,
3354 min_size, max_size, probable_max_size,
3355 use_mempcpy_call, &is_move_done,
3356 might_overlap);
3358 /* Bail out when a mempcpy call would be expanded as a libcall and when
3359 we have a target that provides a fast implementation
3360 of the mempcpy routine. */
3361 if (!is_move_done)
3362 return NULL_RTX;
3364 if (dest_addr == pc_rtx)
3365 return NULL_RTX;
3367 if (dest_addr == 0)
3369 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3370 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3373 if (retmode != RETURN_BEGIN && target != const0_rtx)
3375 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3376 /* stpcpy returns a pointer to the last byte. */
3377 if (retmode == RETURN_END_MINUS_ONE)
3378 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3381 return dest_addr;
3384 static rtx
3385 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3386 rtx target, tree orig_exp, memop_ret retmode)
3388 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3389 retmode, false);
3392 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3393 we failed; the caller should emit a normal call, otherwise try to
3394 get the result in TARGET, if convenient.
3395 Return value is based on RETMODE argument. */
3397 static rtx
3398 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3400 class expand_operand ops[3];
3401 rtx dest_mem;
3402 rtx src_mem;
3404 if (!targetm.have_movstr ())
3405 return NULL_RTX;
3407 dest_mem = get_memory_rtx (dest, NULL);
3408 src_mem = get_memory_rtx (src, NULL);
3409 if (retmode == RETURN_BEGIN)
3411 target = force_reg (Pmode, XEXP (dest_mem, 0));
3412 dest_mem = replace_equiv_address (dest_mem, target);
3415 create_output_operand (&ops[0],
3416 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3417 create_fixed_operand (&ops[1], dest_mem);
3418 create_fixed_operand (&ops[2], src_mem);
3419 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3420 return NULL_RTX;
3422 if (retmode != RETURN_BEGIN && target != const0_rtx)
3424 target = ops[0].value;
3425 /* movstr is supposed to set end to the address of the NUL
3426 terminator. If the caller requested a mempcpy-like return value,
3427 adjust it. */
3428 if (retmode == RETURN_END)
3430 rtx tem = plus_constant (GET_MODE (target),
3431 gen_lowpart (GET_MODE (target), target), 1);
3432 emit_move_insn (target, force_operand (tem, NULL_RTX));
3435 return target;
3438 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3439 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3440 try to get the result in TARGET, if convenient (and in mode MODE if that's
3441 convenient). */
3443 static rtx
3444 expand_builtin_strcpy (tree exp, rtx target)
3446 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
3449 tree dest = CALL_EXPR_ARG (exp, 0);
3450 tree src = CALL_EXPR_ARG (exp, 1);
3452 return expand_builtin_strcpy_args (exp, dest, src, target);
3455 /* Helper function to do the actual work for expand_builtin_strcpy. The
3456 arguments to the builtin_strcpy call DEST and SRC are broken out
3457 so that this can also be called without constructing an actual CALL_EXPR.
3458 The other arguments and return value are the same as for
3459 expand_builtin_strcpy. */
3461 static rtx
3462 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3464 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3467 /* Expand a call EXP to the stpcpy builtin.
3468 Return NULL_RTX if we failed; the caller should emit a normal call,
3469 otherwise try to get the result in TARGET, if convenient (and in
3470 mode MODE if that's convenient). */
3472 static rtx
3473 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3475 tree dst, src;
3476 location_t loc = EXPR_LOCATION (exp);
3478 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3479 return NULL_RTX;
3481 dst = CALL_EXPR_ARG (exp, 0);
3482 src = CALL_EXPR_ARG (exp, 1);
3484 /* If return value is ignored, transform stpcpy into strcpy. */
3485 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3487 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3488 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3489 return expand_expr (result, target, mode, EXPAND_NORMAL);
3491 else
3493 tree len, lenp1;
3494 rtx ret;
3496 /* Ensure we get an actual string whose length can be evaluated at
3497 compile-time, not an expression containing a string. This is
3498 because the latter will potentially produce pessimized code
3499 when used to produce the return value. */
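/* A minimal sketch of the constant-source path below, assuming the call
   stpcpy (d, "abc"): c_strlen yields LEN = 3 and LENP1 = 4, the copy is
   expanded as a 4-byte mempcpy, and RETURN_END_MINUS_ONE makes the
   result d + 3, the address of the terminating NUL.  */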
3500 c_strlen_data lendata = { };
3501 if (!c_getstr (src)
3502 || !(len = c_strlen (src, 0, &lendata, 1)))
3503 return expand_movstr (dst, src, target,
3504 /*retmode=*/ RETURN_END_MINUS_ONE);
3506 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3507 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3508 target, exp,
3509 /*retmode=*/ RETURN_END_MINUS_ONE);
3511 if (ret)
3512 return ret;
3514 if (TREE_CODE (len) == INTEGER_CST)
3516 rtx len_rtx = expand_normal (len);
3518 if (CONST_INT_P (len_rtx))
3520 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3522 if (ret)
3524 if (! target)
3526 if (mode != VOIDmode)
3527 target = gen_reg_rtx (mode);
3528 else
3529 target = gen_reg_rtx (GET_MODE (ret));
3531 if (GET_MODE (target) != GET_MODE (ret))
3532 ret = gen_lowpart (GET_MODE (target), ret);
3534 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3535 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3536 gcc_assert (ret);
3538 return target;
3543 return expand_movstr (dst, src, target,
3544 /*retmode=*/ RETURN_END_MINUS_ONE);
3548 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3549 arguments while being careful to avoid duplicate warnings (which could
3550 be issued if the expander were to expand the call, resulting in it
3551 being emitted in expand_call ()). */
3553 static rtx
3554 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3556 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3558 /* The call has been successfully expanded. Check for nonstring
3559 arguments and issue warnings as appropriate. */
3560 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3561 return ret;
3564 return NULL_RTX;
3567 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3568 bytes from constant string DATA + OFFSET and return it as target
3569 constant. */
3572 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3573 fixed_size_mode mode)
3575 const char *str = (const char *) data;
3577 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3578 return const0_rtx;
3580 /* The by-pieces infrastructure does not try to pick a vector mode
3581 for strncpy expansion. */
3582 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3585 /* Helper to check the sizes of sequences and the destination of calls
3586 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3587 success (no overflow or invalid sizes), false otherwise. */
3589 static bool
3590 check_strncat_sizes (tree exp, tree objsize)
3592 tree dest = CALL_EXPR_ARG (exp, 0);
3593 tree src = CALL_EXPR_ARG (exp, 1);
3594 tree maxread = CALL_EXPR_ARG (exp, 2);
3596 /* Try to determine the range of lengths that the source expression
3597 refers to. */
3598 c_strlen_data lendata = { };
3599 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3601 /* Try to verify that the destination is big enough for the shortest
3602 string. */
3604 access_data data (exp, access_read_write, maxread, true);
3605 if (!objsize && warn_stringop_overflow)
3607 /* If it hasn't been provided by __strncat_chk, try to determine
3608 the size of the destination object into which the source is
3609 being copied. */
3610 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3613 /* Add one for the terminating nul. */
3614 tree srclen = (lendata.minlen
3615 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3616 size_one_node)
3617 : NULL_TREE);
3619 /* The strncat function copies at most MAXREAD bytes and always appends
3620 the terminating nul so the specified upper bound should never be equal
3621 to (or greater than) the size of the destination. */
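/* An assumed example of what this diagnoses: strncat (d, s, sizeof d)
   specifies a bound equal to the destination size; since strncat always
   appends a NUL after the copied bytes, such a bound can never be safe.  */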
3622 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3623 && tree_int_cst_equal (objsize, maxread))
3625 location_t loc = EXPR_LOCATION (exp);
3626 warning_at (loc, OPT_Wstringop_overflow_,
3627 "%qD specified bound %E equals destination size",
3628 get_callee_fndecl (exp), maxread);
3630 return false;
3633 if (!srclen
3634 || (maxread && tree_fits_uhwi_p (maxread)
3635 && tree_fits_uhwi_p (srclen)
3636 && tree_int_cst_lt (maxread, srclen)))
3637 srclen = maxread;
3639 /* The number of bytes to write is LEN but check_access will also
3640 check SRCLEN if LEN's value isn't known. */
3641 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3642 objsize, data.mode, &data);
3645 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3646 NULL_RTX if we failed; the caller should emit a normal call. */
3648 static rtx
3649 expand_builtin_strncpy (tree exp, rtx target)
3651 location_t loc = EXPR_LOCATION (exp);
3653 if (!validate_arglist (exp,
3654 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3655 return NULL_RTX;
3656 tree dest = CALL_EXPR_ARG (exp, 0);
3657 tree src = CALL_EXPR_ARG (exp, 1);
3658 /* The number of bytes to write (not the maximum). */
3659 tree len = CALL_EXPR_ARG (exp, 2);
3661 /* The length of the source sequence. */
3662 tree slen = c_strlen (src, 1);
3664 /* We must be passed a constant len and src parameter. */
3665 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3666 return NULL_RTX;
3668 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3670 /* We're required to pad with trailing zeros if the requested
3671 len is greater than strlen(s2)+1. In that case try to
3672 use store_by_pieces; if that fails, punt. */
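/* As an assumed example, strncpy (d, "ab", 5) must store exactly
   'a', 'b', '\0', '\0', '\0'; builtin_strncpy_read_str supplies zero
   bytes for offsets past the end of the source string, so the padded
   sequence can be emitted as a single constant store_by_pieces.  */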
3673 if (tree_int_cst_lt (slen, len))
3675 unsigned int dest_align = get_pointer_alignment (dest);
3676 const char *p = c_getstr (src);
3677 rtx dest_mem;
3679 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3680 || !can_store_by_pieces (tree_to_uhwi (len),
3681 builtin_strncpy_read_str,
3682 CONST_CAST (char *, p),
3683 dest_align, false))
3684 return NULL_RTX;
3686 dest_mem = get_memory_rtx (dest, len);
3687 store_by_pieces (dest_mem, tree_to_uhwi (len),
3688 builtin_strncpy_read_str,
3689 CONST_CAST (char *, p), dest_align, false,
3690 RETURN_BEGIN);
3691 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3692 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3693 return dest_mem;
3696 return NULL_RTX;
3699 /* Return the RTL of a register in MODE generated from PREV in the
3700 previous iteration. */
3702 static rtx
3703 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3705 rtx target = nullptr;
3706 if (prev != nullptr && prev->data != nullptr)
3708 /* Use the previous data in the same mode. */
3709 if (prev->mode == mode)
3710 return prev->data;
3712 fixed_size_mode prev_mode = prev->mode;
3714 /* Don't use the previous data to write QImode if it is in a
3715 vector mode. */
3716 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3717 return target;
3719 rtx prev_rtx = prev->data;
3721 if (REG_P (prev_rtx)
3722 && HARD_REGISTER_P (prev_rtx)
3723 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3725 /* This case occurs when PREV_MODE is a vector and when
3726 MODE is too small to store using vector operations.
3727 After register allocation, the code will need to move the
3728 lowpart of the vector register into a non-vector register.
3730 Also, the target has chosen to use a hard register
3731 instead of going with the default choice of using a
3732 pseudo register. We should respect that choice and try to
3733 avoid creating a pseudo register with the same mode as the
3734 current hard register.
3736 In principle, we could just use a lowpart MODE subreg of
3737 the vector register. However, the vector register mode might
3738 be too wide for non-vector registers, and we already know
3739 that the non-vector mode is too small for vector registers.
3740 It's therefore likely that we'd need to spill to memory in
3741 the vector mode and reload the non-vector value from there.
3743 Try to avoid that by reducing the vector register to the
3744 smallest size that it can hold. This should increase the
3745 chances that non-vector registers can hold both the inner
3746 and outer modes of the subreg that we generate later. */
3747 machine_mode m;
3748 fixed_size_mode candidate;
3749 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3750 if (is_a<fixed_size_mode> (m, &candidate))
3752 if (GET_MODE_SIZE (candidate)
3753 >= GET_MODE_SIZE (prev_mode))
3754 break;
3755 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3756 && lowpart_subreg_regno (REGNO (prev_rtx),
3757 prev_mode, candidate) >= 0)
3759 target = lowpart_subreg (candidate, prev_rtx,
3760 prev_mode);
3761 prev_rtx = target;
3762 prev_mode = candidate;
3763 break;
3766 if (target == nullptr)
3767 prev_rtx = copy_to_reg (prev_rtx);
3770 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3772 return target;
3775 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3776 bytes from constant string DATA + OFFSET and return it as target
3777 constant. If PREV isn't nullptr, it has the RTL info from the
3778 previous iteration. */
3781 builtin_memset_read_str (void *data, void *prev,
3782 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3783 fixed_size_mode mode)
3785 const char *c = (const char *) data;
3786 unsigned int size = GET_MODE_SIZE (mode);
3788 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3789 mode);
3790 if (target != nullptr)
3791 return target;
3792 rtx src = gen_int_mode (*c, QImode);
3794 if (VECTOR_MODE_P (mode))
3796 gcc_assert (GET_MODE_INNER (mode) == QImode);
3798 rtx const_vec = gen_const_vec_duplicate (mode, src);
3799 if (prev == NULL)
3800 /* Return CONST_VECTOR when called by a query function. */
3801 return const_vec;
3803 /* Use the move expander with CONST_VECTOR. */
3804 target = targetm.gen_memset_scratch_rtx (mode);
3805 emit_move_insn (target, const_vec);
3806 return target;
3809 char *p = XALLOCAVEC (char, size);
3811 memset (p, *c, size);
3813 /* Vector modes should be handled above. */
3814 return c_readstr (p, as_a <scalar_int_mode> (mode));
3817 /* Callback routine for store_by_pieces. Return the RTL of a register
3818 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3819 char value given in the RTL register data. For example, if mode is
3820 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3821 nullptr, it has the RTL info from the previous iteration. */
3823 static rtx
3824 builtin_memset_gen_str (void *data, void *prev,
3825 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3826 fixed_size_mode mode)
3828 rtx target, coeff;
3829 size_t size;
3830 char *p;
3832 size = GET_MODE_SIZE (mode);
3833 if (size == 1)
3834 return (rtx) data;
3836 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3837 if (target != nullptr)
3838 return target;
3840 if (VECTOR_MODE_P (mode))
3842 gcc_assert (GET_MODE_INNER (mode) == QImode);
3844 /* Support for vec_duplicate_optab is a precondition for picking a vector
3845 mode in the memset expander. */
3846 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3848 target = targetm.gen_memset_scratch_rtx (mode);
3849 class expand_operand ops[2];
3850 create_output_operand (&ops[0], target, mode);
3851 create_input_operand (&ops[1], (rtx) data, QImode);
3852 expand_insn (icode, 2, ops);
3853 if (!rtx_equal_p (target, ops[0].value))
3854 emit_move_insn (target, ops[0].value);
3856 return target;
3859 p = XALLOCAVEC (char, size);
3860 memset (p, 1, size);
3861 /* Vector modes should be handled above. */
3862 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3864 target = convert_to_mode (mode, (rtx) data, 1);
3865 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3866 return force_reg (mode, target);
3869 /* Expand expression EXP, which is a call to the memset builtin. Return
3870 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3871 try to get the result in TARGET, if convenient (and in mode MODE if that's
3872 convenient). */
3875 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3877 if (!validate_arglist (exp,
3878 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3879 return NULL_RTX;
3881 tree dest = CALL_EXPR_ARG (exp, 0);
3882 tree val = CALL_EXPR_ARG (exp, 1);
3883 tree len = CALL_EXPR_ARG (exp, 2);
3885 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3888 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3889 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3890 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3891 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3893 The strategy is to issue one store_by_pieces for each power of two,
3894 from most to least significant, guarded by a test on whether there
3895 are at least that many bytes left to copy in LEN.
3897 ??? Should we skip some powers of two in favor of loops? Maybe start
3898 at the max of TO/LEN/word alignment, at least when optimizing for
3899 size, instead of ensuring O(log len) dynamic compares? */
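/* A rough sketch of the emitted sequence, with assumed values
   MIN_LEN = 0, MAX_LEN = 30, CTZ_LEN = 1 and a byte-aligned destination
   (so no initial fixed-size block is issued):
     if (len >= 16) store 16 bytes, advance ptr, len -= 16;
     if (len >= 8)  store 8 bytes,  advance ptr, len -= 8;
     if (len >= 4)  store 4 bytes,  advance ptr, len -= 4;
     if (len >= 2)  store 2 bytes;
   i.e. one guarded power-of-two store per candidate size rather than a
   byte loop.  */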
3901 bool
3902 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
3903 unsigned HOST_WIDE_INT min_len,
3904 unsigned HOST_WIDE_INT max_len,
3905 rtx val, char valc, unsigned int align)
3907 int max_bits = floor_log2 (max_len);
3908 int min_bits = floor_log2 (min_len);
3909 int sctz_len = ctz_len;
3911 gcc_checking_assert (sctz_len >= 0);
3913 if (val)
3914 valc = 1;
3916 /* Bits more significant than TST_BITS are part of the shared prefix
3917 in the binary representation of both min_len and max_len. Since
3918 they're identical, we don't need to test them in the loop. */
3919 int tst_bits = (max_bits != min_bits ? max_bits
3920 : floor_log2 (max_len ^ min_len));
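/* For instance, with assumed MIN_LEN = 20 (10100b), MAX_LEN = 23 (10111b),
   CTZ_LEN = 0 and byte alignment: bits 4 and 2 belong to the shared
   prefix, so the 16- and 4-byte stores are issued unconditionally, the
   8-byte store is skipped (its bit is clear in MAX_LEN), and only the
   2- and 1-byte stores (TST_BITS = 1) get a dynamic length test.  */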
3922 /* Check whether it's profitable to start by storing a fixed BLKSIZE
3923 bytes, to lower max_bits. In the unlikely case of a constant LEN
3924 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
3925 single store_by_pieces, but otherwise, select the minimum multiple
3926 of the ALIGN (in bytes) and of the GCD of the possible LENs that
3927 brings MAX_LEN below 2^TST_BITS, provided it is not larger than MIN_LEN. */
3928 unsigned HOST_WIDE_INT blksize;
3929 if (max_len > min_len)
3931 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
3932 align / BITS_PER_UNIT);
3933 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
3934 blksize &= ~(alrng - 1);
3936 else if (max_len == min_len)
3937 blksize = max_len;
3938 else
3939 gcc_unreachable ();
3940 if (min_len >= blksize)
3942 min_len -= blksize;
3943 min_bits = floor_log2 (min_len);
3944 max_len -= blksize;
3945 max_bits = floor_log2 (max_len);
3947 tst_bits = (max_bits != min_bits ? max_bits
3948 : floor_log2 (max_len ^ min_len));
3950 else
3951 blksize = 0;
3953 /* Check that we can use store by pieces for the maximum store count
3954 we may issue (initial fixed-size block, plus conditional
3955 power-of-two-sized stores from max_bits down to ctz_len). */
3956 unsigned HOST_WIDE_INT xlenest = blksize;
3957 if (max_bits >= 0)
3958 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
3959 - (HOST_WIDE_INT_1U << ctz_len));
3960 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
3961 &valc, align, true))
3962 return false;
3964 by_pieces_constfn constfun;
3965 void *constfundata;
3966 if (val)
3968 constfun = builtin_memset_gen_str;
3969 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
3970 val);
3972 else
3974 constfun = builtin_memset_read_str;
3975 constfundata = &valc;
3978 rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0));
3979 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
3980 to = replace_equiv_address (to, ptr);
3981 set_mem_align (to, align);
3983 if (blksize)
3985 to = store_by_pieces (to, blksize,
3986 constfun, constfundata,
3987 align, true,
3988 max_len != 0 ? RETURN_END : RETURN_BEGIN);
3989 if (max_len == 0)
3990 return true;
3992 /* Adjust PTR, TO and REM. Since TO's address is likely
3993 PTR+offset, we have to replace it. */
3994 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
3995 to = replace_equiv_address (to, ptr);
3996 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
3997 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4000 /* Iterate over power-of-two block sizes from the maximum length to
4001 the least significant bit possibly set in the length. */
4002 for (int i = max_bits; i >= sctz_len; i--)
4004 rtx_code_label *label = NULL;
4005 blksize = HOST_WIDE_INT_1U << i;
4007 /* If we're past the bits shared between min_ and max_len, expand
4008 a test on the dynamic length, comparing it with the
4009 BLKSIZE. */
4010 if (i <= tst_bits)
4012 label = gen_label_rtx ();
4013 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4014 ptr_mode, 1, label,
4015 profile_probability::even ());
4017 /* If we are at a bit that is in the prefix shared by min_ and
4018 max_len, skip this BLKSIZE if the bit is clear. */
4019 else if ((max_len & blksize) == 0)
4020 continue;
4022 /* Issue a store of BLKSIZE bytes. */
4023 to = store_by_pieces (to, blksize,
4024 constfun, constfundata,
4025 align, true,
4026 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4028 /* Adjust REM and PTR, unless this is the last iteration. */
4029 if (i != sctz_len)
4031 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4032 to = replace_equiv_address (to, ptr);
4033 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4034 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4037 if (label)
4039 emit_label (label);
4041 /* Given conditional stores, the offset can no longer be
4042 known, so clear it. */
4043 clear_mem_offset (to);
4047 return true;
4050 /* Helper function to do the actual work for expand_builtin_memset. The
4051 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4052 so that this can also be called without constructing an actual CALL_EXPR.
4053 The other arguments and return value are the same as for
4054 expand_builtin_memset. */
4056 static rtx
4057 expand_builtin_memset_args (tree dest, tree val, tree len,
4058 rtx target, machine_mode mode, tree orig_exp)
4060 tree fndecl, fn;
4061 enum built_in_function fcode;
4062 machine_mode val_mode;
4063 char c;
4064 unsigned int dest_align;
4065 rtx dest_mem, dest_addr, len_rtx;
4066 HOST_WIDE_INT expected_size = -1;
4067 unsigned int expected_align = 0;
4068 unsigned HOST_WIDE_INT min_size;
4069 unsigned HOST_WIDE_INT max_size;
4070 unsigned HOST_WIDE_INT probable_max_size;
4072 dest_align = get_pointer_alignment (dest);
4074 /* If DEST is not a pointer type, don't do this operation in-line. */
4075 if (dest_align == 0)
4076 return NULL_RTX;
4078 if (currently_expanding_gimple_stmt)
4079 stringop_block_profile (currently_expanding_gimple_stmt,
4080 &expected_align, &expected_size);
4082 if (expected_align < dest_align)
4083 expected_align = dest_align;
4085 /* If the LEN parameter is zero, return DEST. */
4086 if (integer_zerop (len))
4088 /* Evaluate and ignore VAL in case it has side-effects. */
4089 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4090 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4093 /* Stabilize the arguments in case we fail. */
4094 dest = builtin_save_expr (dest);
4095 val = builtin_save_expr (val);
4096 len = builtin_save_expr (len);
4098 len_rtx = expand_normal (len);
4099 determine_block_size (len, len_rtx, &min_size, &max_size,
4100 &probable_max_size);
4101 dest_mem = get_memory_rtx (dest, len);
4102 val_mode = TYPE_MODE (unsigned_char_type_node);
4104 if (TREE_CODE (val) != INTEGER_CST
4105 || target_char_cast (val, &c))
4107 rtx val_rtx;
4109 val_rtx = expand_normal (val);
4110 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4112 /* Assume that we can memset by pieces if we can store
4113 the coefficients by pieces (in the required modes).
4114 We can't pass builtin_memset_gen_str as that emits RTL. */
4115 c = 1;
4116 if (tree_fits_uhwi_p (len)
4117 && can_store_by_pieces (tree_to_uhwi (len),
4118 builtin_memset_read_str, &c, dest_align,
4119 true))
4121 val_rtx = force_reg (val_mode, val_rtx);
4122 store_by_pieces (dest_mem, tree_to_uhwi (len),
4123 builtin_memset_gen_str, val_rtx, dest_align,
4124 true, RETURN_BEGIN);
4126 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4127 dest_align, expected_align,
4128 expected_size, min_size, max_size,
4129 probable_max_size)
4130 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4131 tree_ctz (len),
4132 min_size, max_size,
4133 val_rtx, 0,
4134 dest_align))
4135 goto do_libcall;
4137 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4138 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4139 return dest_mem;
4142 if (c)
4144 if (tree_fits_uhwi_p (len)
4145 && can_store_by_pieces (tree_to_uhwi (len),
4146 builtin_memset_read_str, &c, dest_align,
4147 true))
4148 store_by_pieces (dest_mem, tree_to_uhwi (len),
4149 builtin_memset_read_str, &c, dest_align, true,
4150 RETURN_BEGIN);
4151 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4152 gen_int_mode (c, val_mode),
4153 dest_align, expected_align,
4154 expected_size, min_size, max_size,
4155 probable_max_size)
4156 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4157 tree_ctz (len),
4158 min_size, max_size,
4159 NULL_RTX, c,
4160 dest_align))
4161 goto do_libcall;
4163 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4164 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4165 return dest_mem;
4168 set_mem_align (dest_mem, dest_align);
4169 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4170 CALL_EXPR_TAILCALL (orig_exp)
4171 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4172 expected_align, expected_size,
4173 min_size, max_size,
4174 probable_max_size, tree_ctz (len));
4176 if (dest_addr == 0)
4178 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4179 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4182 return dest_addr;
4184 do_libcall:
4185 fndecl = get_callee_fndecl (orig_exp);
4186 fcode = DECL_FUNCTION_CODE (fndecl);
4187 if (fcode == BUILT_IN_MEMSET)
4188 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4189 dest, val, len);
4190 else if (fcode == BUILT_IN_BZERO)
4191 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4192 dest, len);
4193 else
4194 gcc_unreachable ();
4195 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4196 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4197 return expand_call (fn, target, target == const0_rtx);
4200 /* Expand expression EXP, which is a call to the bzero builtin. Return
4201 NULL_RTX if we failed; the caller should emit a normal call. */
4203 static rtx
4204 expand_builtin_bzero (tree exp)
4206 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4207 return NULL_RTX;
4209 tree dest = CALL_EXPR_ARG (exp, 0);
4210 tree size = CALL_EXPR_ARG (exp, 1);
4212 /* New argument list transforming bzero(ptr x, int y) to
4213 memset(ptr x, int 0, size_t y). This is done this way
4214 so that if it isn't expanded inline, we fall back to
4215 calling bzero instead of memset. */
4217 location_t loc = EXPR_LOCATION (exp);
4219 return expand_builtin_memset_args (dest, integer_zero_node,
4220 fold_convert_loc (loc,
4221 size_type_node, size),
4222 const0_rtx, VOIDmode, exp);
4225 /* Try to expand cmpstr operation ICODE with the given operands.
4226 Return the result rtx on success, otherwise return null. */
4228 static rtx
4229 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4230 HOST_WIDE_INT align)
4232 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4234 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4235 target = NULL_RTX;
4237 class expand_operand ops[4];
4238 create_output_operand (&ops[0], target, insn_mode);
4239 create_fixed_operand (&ops[1], arg1_rtx);
4240 create_fixed_operand (&ops[2], arg2_rtx);
4241 create_integer_operand (&ops[3], align);
4242 if (maybe_expand_insn (icode, 4, ops))
4243 return ops[0].value;
4244 return NULL_RTX;
4247 /* Expand expression EXP, which is a call to the memcmp built-in function.
4248 Return NULL_RTX if we failed and the caller should emit a normal call,
4249 otherwise try to get the result in TARGET, if convenient.
4250 RESULT_EQ is true if we can relax the returned value to be either zero
4251 or nonzero, without caring about the sign. */
4253 static rtx
4254 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4256 if (!validate_arglist (exp,
4257 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4258 return NULL_RTX;
4260 tree arg1 = CALL_EXPR_ARG (exp, 0);
4261 tree arg2 = CALL_EXPR_ARG (exp, 1);
4262 tree len = CALL_EXPR_ARG (exp, 2);
4264 /* Diagnose calls where the specified length exceeds the size of either
4265 object. */
4266 if (!check_read_access (exp, arg1, len, 0)
4267 || !check_read_access (exp, arg2, len, 0))
4268 return NULL_RTX;
4270 /* Due to the performance benefit, always inline the calls first
4271 when result_eq is false. */
4272 rtx result = NULL_RTX;
4273 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4274 if (!result_eq && fcode != BUILT_IN_BCMP)
4276 result = inline_expand_builtin_bytecmp (exp, target);
4277 if (result)
4278 return result;
4281 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4282 location_t loc = EXPR_LOCATION (exp);
4284 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4285 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4287 /* If we don't have POINTER_TYPE, call the function. */
4288 if (arg1_align == 0 || arg2_align == 0)
4289 return NULL_RTX;
4291 rtx arg1_rtx = get_memory_rtx (arg1, len);
4292 rtx arg2_rtx = get_memory_rtx (arg2, len);
4293 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4295 /* Set MEM_SIZE as appropriate. */
4296 if (CONST_INT_P (len_rtx))
4298 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4299 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4302 by_pieces_constfn constfn = NULL;
4304 /* Try to get the byte representation of the constant that ARG2 (or, only
4305 when the function's result is tested for equality to zero, ARG1)
4306 points to, along with its byte size in NBYTES. */
4307 unsigned HOST_WIDE_INT nbytes;
4308 const char *rep = getbyterep (arg2, &nbytes);
4309 if (result_eq && rep == NULL)
4311 /* For equality to zero the arguments are interchangeable. */
4312 rep = getbyterep (arg1, &nbytes);
4313 if (rep != NULL)
4314 std::swap (arg1_rtx, arg2_rtx);
4317 /* If the function's constant bound LEN_RTX is less than or equal
4318 to the byte size of the representation of the constant argument,
4319 and if block move would be done by pieces, we can avoid loading
4320 the bytes from memory and only store the computed constant result. */
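/* E.g. (assumed) for memcmp (buf, "abc", 3) the three constant bytes of
   "abc" are known at compile time, so the by-pieces comparison can use
   them directly instead of loading the second argument from memory.  */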
4321 if (rep
4322 && CONST_INT_P (len_rtx)
4323 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4324 constfn = builtin_memcpy_read_str;
4326 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4327 TREE_TYPE (len), target,
4328 result_eq, constfn,
4329 CONST_CAST (char *, rep));
4331 if (result)
4333 /* Return the value in the proper mode for this function. */
4334 if (GET_MODE (result) == mode)
4335 return result;
4337 if (target != 0)
4339 convert_move (target, result, 0);
4340 return target;
4343 return convert_to_mode (mode, result, 0);
4346 return NULL_RTX;
4349 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4350 if we failed; the caller should emit a normal call, otherwise try to get
4351 the result in TARGET, if convenient. */
4353 static rtx
4354 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4356 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4357 return NULL_RTX;
4359 tree arg1 = CALL_EXPR_ARG (exp, 0);
4360 tree arg2 = CALL_EXPR_ARG (exp, 1);
4362 /* Due to the performance benefit, always inline the calls first. */
4363 rtx result = NULL_RTX;
4364 result = inline_expand_builtin_bytecmp (exp, target);
4365 if (result)
4366 return result;
4368 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4369 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4370 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4371 return NULL_RTX;
4373 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4374 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4376 /* If we don't have POINTER_TYPE, call the function. */
4377 if (arg1_align == 0 || arg2_align == 0)
4378 return NULL_RTX;
4380 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4381 arg1 = builtin_save_expr (arg1);
4382 arg2 = builtin_save_expr (arg2);
4384 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4385 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4387 /* Try to call cmpstrsi. */
4388 if (cmpstr_icode != CODE_FOR_nothing)
4389 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4390 MIN (arg1_align, arg2_align));
4392 /* Try to determine at least one length and call cmpstrnsi. */
4393 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4395 tree len;
4396 rtx arg3_rtx;
4398 tree len1 = c_strlen (arg1, 1);
4399 tree len2 = c_strlen (arg2, 1);
4401 if (len1)
4402 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4403 if (len2)
4404 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4406 /* If we don't have a constant length for the first, use the length
4407 of the second, if we know it. We don't require a constant for
4408 this case; some cost analysis could be done if both are available
4409 but neither is constant. For now, assume they're equally cheap,
4410 unless one has side effects. If both strings have constant lengths,
4411 use the smaller. */
4413 if (!len1)
4414 len = len2;
4415 else if (!len2)
4416 len = len1;
4417 else if (TREE_SIDE_EFFECTS (len1))
4418 len = len2;
4419 else if (TREE_SIDE_EFFECTS (len2))
4420 len = len1;
4421 else if (TREE_CODE (len1) != INTEGER_CST)
4422 len = len2;
4423 else if (TREE_CODE (len2) != INTEGER_CST)
4424 len = len1;
4425 else if (tree_int_cst_lt (len1, len2))
4426 len = len1;
4427 else
4428 len = len2;
4430 /* If both arguments have side effects, we cannot optimize. */
4431 if (len && !TREE_SIDE_EFFECTS (len))
4433 arg3_rtx = expand_normal (len);
4434 result = expand_cmpstrn_or_cmpmem
4435 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4436 arg3_rtx, MIN (arg1_align, arg2_align));
4440 tree fndecl = get_callee_fndecl (exp);
4441 if (result)
4443 /* Return the value in the proper mode for this function. */
4444 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4445 if (GET_MODE (result) == mode)
4446 return result;
4447 if (target == 0)
4448 return convert_to_mode (mode, result, 0);
4449 convert_move (target, result, 0);
4450 return target;
4453 /* Expand the library call ourselves using a stabilized argument
4454 list to avoid evaluating the function's arguments twice. */
4455 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4456 copy_warning (fn, exp);
4457 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4459 return expand_call (fn, target, target == const0_rtx);
4462 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4463 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4464 try to get the result in TARGET, if convenient. */
4466 static rtx
4467 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4468 ATTRIBUTE_UNUSED machine_mode mode)
4470 if (!validate_arglist (exp,
4471 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4472 return NULL_RTX;
4474 tree arg1 = CALL_EXPR_ARG (exp, 0);
4475 tree arg2 = CALL_EXPR_ARG (exp, 1);
4476 tree arg3 = CALL_EXPR_ARG (exp, 2);
4478 location_t loc = EXPR_LOCATION (exp);
4479 tree len1 = c_strlen (arg1, 1);
4480 tree len2 = c_strlen (arg2, 1);
4482 /* Due to the performance benefit, always inline the calls first. */
4483 rtx result = NULL_RTX;
4484 result = inline_expand_builtin_bytecmp (exp, target);
4485 if (result)
4486 return result;
4488 /* If c_strlen can determine an expression for one of the string
4489 lengths, and it doesn't have side effects, then emit cmpstrnsi
4490 using length MIN(strlen(string)+1, arg3). */
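/* E.g. (assumed) for strncmp (s, "hello", n) this emits cmpstrnsi with
   length MIN (6, n), since at most strlen ("hello") + 1 bytes of the
   constant string are significant.  */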
4491 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4492 if (cmpstrn_icode == CODE_FOR_nothing)
4493 return NULL_RTX;
4495 tree len;
4497 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4498 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4500 if (len1)
4501 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4502 if (len2)
4503 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4505 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4507 /* If we don't have a constant length for the first, use the length
4508 of the second, if we know it. If neither string is constant length,
4509 use the given length argument. We don't require a constant for
4510 this case; some cost analysis could be done if both are available
4511 but neither is constant. For now, assume they're equally cheap,
4512 unless one has side effects. If both strings have constant lengths,
4513 use the smaller. */
4515 if (!len1 && !len2)
4516 len = len3;
4517 else if (!len1)
4518 len = len2;
4519 else if (!len2)
4520 len = len1;
4521 else if (TREE_SIDE_EFFECTS (len1))
4522 len = len2;
4523 else if (TREE_SIDE_EFFECTS (len2))
4524 len = len1;
4525 else if (TREE_CODE (len1) != INTEGER_CST)
4526 len = len2;
4527 else if (TREE_CODE (len2) != INTEGER_CST)
4528 len = len1;
4529 else if (tree_int_cst_lt (len1, len2))
4530 len = len1;
4531 else
4532 len = len2;
4534 /* If we are not using the given length, we must incorporate it here.
4535 The actual new length parameter will be MIN(len,arg3) in this case. */
4536 if (len != len3)
4538 len = fold_convert_loc (loc, sizetype, len);
4539 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4541 rtx arg1_rtx = get_memory_rtx (arg1, len);
4542 rtx arg2_rtx = get_memory_rtx (arg2, len);
4543 rtx arg3_rtx = expand_normal (len);
4544 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4545 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4546 MIN (arg1_align, arg2_align));
4548 tree fndecl = get_callee_fndecl (exp);
4549 if (result)
4551 /* Return the value in the proper mode for this function. */
4552 mode = TYPE_MODE (TREE_TYPE (exp));
4553 if (GET_MODE (result) == mode)
4554 return result;
4555 if (target == 0)
4556 return convert_to_mode (mode, result, 0);
4557 convert_move (target, result, 0);
4558 return target;
4561 /* Expand the library call ourselves using a stabilized argument
4562 list to avoid evaluating the function's arguments twice. */
4563 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4564 copy_warning (call, exp);
4565 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4566 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4567 return expand_call (call, target, target == const0_rtx);
4570 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4571 if that's convenient. */
4574 expand_builtin_saveregs (void)
4576 rtx val;
4577 rtx_insn *seq;
4579 /* Don't do __builtin_saveregs more than once in a function.
4580 Save the result of the first call and reuse it. */
4581 if (saveregs_value != 0)
4582 return saveregs_value;
4584 /* When this function is called, it means that registers must be
4585 saved on entry to this function. So we migrate the call to the
4586 first insn of this function. */
4588 start_sequence ();
4590 /* Do whatever the machine needs done in this case. */
4591 val = targetm.calls.expand_builtin_saveregs ();
4593 seq = get_insns ();
4594 end_sequence ();
4596 saveregs_value = val;
4598 /* Put the insns after the NOTE that starts the function. If this
4599 is inside a start_sequence, make the outer-level insn chain current, so
4600 the code is placed at the start of the function. */
4601 push_topmost_sequence ();
4602 emit_insn_after (seq, entry_of_function ());
4603 pop_topmost_sequence ();
4605 return val;
4608 /* Expand a call to __builtin_next_arg. */
4610 static rtx
4611 expand_builtin_next_arg (void)
4613 /* Checking arguments is already done in fold_builtin_next_arg,
4614 which must be called before this function. */
4615 return expand_binop (ptr_mode, add_optab,
4616 crtl->args.internal_arg_pointer,
4617 crtl->args.arg_offset_rtx,
4618 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4621 /* Make it easier for the backends by protecting the valist argument
4622 from multiple evaluations. */
4624 static tree
4625 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4627 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4629 /* The current way of determining the type of valist is completely
4630 bogus. We should have the information on the va builtin instead. */
4631 if (!vatype)
4632 vatype = targetm.fn_abi_va_list (cfun->decl);
4634 if (TREE_CODE (vatype) == ARRAY_TYPE)
4636 if (TREE_SIDE_EFFECTS (valist))
4637 valist = save_expr (valist);
4639 /* For this case, the backends will be expecting a pointer to
4640 vatype, but it's possible we've actually been given an array
4641 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4642 So fix it. */
4643 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4645 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4646 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4649 else
4651 tree pt = build_pointer_type (vatype);
4653 if (! needs_lvalue)
4655 if (! TREE_SIDE_EFFECTS (valist))
4656 return valist;
4658 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4659 TREE_SIDE_EFFECTS (valist) = 1;
4662 if (TREE_SIDE_EFFECTS (valist))
4663 valist = save_expr (valist);
4664 valist = fold_build2_loc (loc, MEM_REF,
4665 vatype, valist, build_int_cst (pt, 0));
4668 return valist;
4671 /* The "standard" definition of va_list is void*. */
4673 tree
4674 std_build_builtin_va_list (void)
4676 return ptr_type_node;
4679 /* The "standard" abi va_list is va_list_type_node. */
4681 tree
4682 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4684 return va_list_type_node;
4687 /* The "standard" type of va_list is va_list_type_node. */
4689 tree
4690 std_canonical_va_list_type (tree type)
4692 tree wtype, htype;
4694 wtype = va_list_type_node;
4695 htype = type;
4697 if (TREE_CODE (wtype) == ARRAY_TYPE)
4699 /* If va_list is an array type, the argument may have decayed
4700 to a pointer type, e.g. by being passed to another function.
4701 In that case, unwrap both types so that we can compare the
4702 underlying records. */
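/* For example (target-dependent, assumed here), where va_list is
   declared as 'struct __va_list_tag va_list[1]', a va_list parameter
   decays to 'struct __va_list_tag *'; unwrapping both types lets the
   underlying records be compared directly.  */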
4703 if (TREE_CODE (htype) == ARRAY_TYPE
4704 || POINTER_TYPE_P (htype))
4706 wtype = TREE_TYPE (wtype);
4707 htype = TREE_TYPE (htype);
4710 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4711 return va_list_type_node;
4713 return NULL_TREE;
4716 /* The "standard" implementation of va_start: just assign `nextarg' to
4717 the variable. */
4719 void
4720 std_expand_builtin_va_start (tree valist, rtx nextarg)
4722 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4723 convert_move (va_r, nextarg, 0);
4726 /* Expand EXP, a call to __builtin_va_start. */
4728 static rtx
4729 expand_builtin_va_start (tree exp)
4731 rtx nextarg;
4732 tree valist;
4733 location_t loc = EXPR_LOCATION (exp);
4735 if (call_expr_nargs (exp) < 2)
4737 error_at (loc, "too few arguments to function %<va_start%>");
4738 return const0_rtx;
4741 if (fold_builtin_next_arg (exp, true))
4742 return const0_rtx;
4744 nextarg = expand_builtin_next_arg ();
4745 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4747 if (targetm.expand_builtin_va_start)
4748 targetm.expand_builtin_va_start (valist, nextarg);
4749 else
4750 std_expand_builtin_va_start (valist, nextarg);
4752 return const0_rtx;
4755 /* Expand EXP, a call to __builtin_va_end. */
4757 static rtx
4758 expand_builtin_va_end (tree exp)
4760 tree valist = CALL_EXPR_ARG (exp, 0);
4762 /* Evaluate for side effects, if needed. I hate macros that don't
4763 do that. */
4764 if (TREE_SIDE_EFFECTS (valist))
4765 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4767 return const0_rtx;
4770 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4771 builtin rather than just as an assignment in stdarg.h because of the
4772 nastiness of array-type va_list types. */
4774 static rtx
4775 expand_builtin_va_copy (tree exp)
4777 tree dst, src, t;
4778 location_t loc = EXPR_LOCATION (exp);
4780 dst = CALL_EXPR_ARG (exp, 0);
4781 src = CALL_EXPR_ARG (exp, 1);
4783 dst = stabilize_va_list_loc (loc, dst, 1);
4784 src = stabilize_va_list_loc (loc, src, 0);
4786 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4788 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4790 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4791 TREE_SIDE_EFFECTS (t) = 1;
4792 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4794 else
4796 rtx dstb, srcb, size;
4798 /* Evaluate to pointers. */
4799 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4800 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4801 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4802 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4804 dstb = convert_memory_address (Pmode, dstb);
4805 srcb = convert_memory_address (Pmode, srcb);
4807 /* "Dereference" to BLKmode memories. */
4808 dstb = gen_rtx_MEM (BLKmode, dstb);
4809 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4810 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4811 srcb = gen_rtx_MEM (BLKmode, srcb);
4812 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4813 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4815 /* Copy. */
4816 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4819 return const0_rtx;
4822 /* Expand a call to one of the builtin functions __builtin_frame_address or
4823 __builtin_return_address. */
4825 static rtx
4826 expand_builtin_frame_address (tree fndecl, tree exp)
4828 /* The argument must be a nonnegative integer constant.
4829 It counts the number of frames to scan up the stack.
4830 The value is either the frame pointer value or the return
4831 address saved in that frame. */
4832 if (call_expr_nargs (exp) == 0)
4833 /* Warning about missing arg was already issued. */
4834 return const0_rtx;
4835 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4837 error ("invalid argument to %qD", fndecl);
4838 return const0_rtx;
4840 else
4842 /* Number of frames to scan up the stack. */
4843 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4845 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4847 /* Some ports cannot access arbitrary stack frames. */
4848 if (tem == NULL)
4850 warning (0, "unsupported argument to %qD", fndecl);
4851 return const0_rtx;
4854 if (count)
4856 /* Warn since no effort is made to ensure that any frame
4857 beyond the current one exists or can be safely reached. */
4858 warning (OPT_Wframe_address, "calling %qD with "
4859 "a nonzero argument is unsafe", fndecl);
4862 /* For __builtin_frame_address, return what we've got. */
4863 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4864 return tem;
4866 if (!REG_P (tem)
4867 && ! CONSTANT_P (tem))
4868 tem = copy_addr_to_reg (tem);
4869 return tem;
4873 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4874 failed and the caller should emit a normal call. */
4876 static rtx
4877 expand_builtin_alloca (tree exp)
4879 rtx op0;
4880 rtx result;
4881 unsigned int align;
4882 tree fndecl = get_callee_fndecl (exp);
4883 HOST_WIDE_INT max_size;
4884 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4885 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4886 bool valid_arglist
4887 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4888 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4889 VOID_TYPE)
4890 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4891 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4892 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4894 if (!valid_arglist)
4895 return NULL_RTX;
4897 /* Compute the argument. */
4898 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4900 /* Compute the alignment. */
4901 align = (fcode == BUILT_IN_ALLOCA
4902 ? BIGGEST_ALIGNMENT
4903 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4905 /* Compute the maximum size. */
4906 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4907 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4908 : -1);
4910 /* Allocate the desired space. If the allocation stems from the declaration
4911 of a variable-sized object, it cannot accumulate. */
4912 result
4913 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4914 result = convert_memory_address (ptr_mode, result);
4916 /* Dynamic allocations for variables are recorded during gimplification. */
4917 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
4918 record_dynamic_alloc (exp);
4920 return result;
4923 /* Emit the call to __asan_allocas_unpoison described by EXP. Add to its
4924 second argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is
4925 the STACK_DYNAMIC_OFFSET value. See the motivation for this in the
4926 comment on the handle_builtin_stack_restore function. */
4928 static rtx
4929 expand_asan_emit_allocas_unpoison (tree exp)
4931 tree arg0 = CALL_EXPR_ARG (exp, 0);
4932 tree arg1 = CALL_EXPR_ARG (exp, 1);
4933 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4934 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4935 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
4936 stack_pointer_rtx, NULL_RTX, 0,
4937 OPTAB_LIB_WIDEN);
4938 off = convert_modes (ptr_mode, Pmode, off, 0);
4939 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
4940 OPTAB_LIB_WIDEN);
4941 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4942 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
4943 top, ptr_mode, bot, ptr_mode);
4944 return ret;
4947 /* Expand a call to bswap builtin in EXP.
4948 Return NULL_RTX if a normal call should be emitted rather than expanding the
4949 function in-line. If convenient, the result should be placed in TARGET.
4950 SUBTARGET may be used as the target for computing one of EXP's operands. */
4952 static rtx
4953 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4954 rtx subtarget)
4956 tree arg;
4957 rtx op0;
4959 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4960 return NULL_RTX;
4962 arg = CALL_EXPR_ARG (exp, 0);
4963 op0 = expand_expr (arg,
4964 subtarget && GET_MODE (subtarget) == target_mode
4965 ? subtarget : NULL_RTX,
4966 target_mode, EXPAND_NORMAL);
4967 if (GET_MODE (op0) != target_mode)
4968 op0 = convert_to_mode (target_mode, op0, 1);
4970 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4972 gcc_assert (target);
4974 return convert_to_mode (target_mode, target, 1);
4977 /* Expand a call to a unary builtin in EXP.
4978 Return NULL_RTX if a normal call should be emitted rather than expanding the
4979 function in-line. If convenient, the result should be placed in TARGET.
4980 SUBTARGET may be used as the target for computing one of EXP's operands. */
4982 static rtx
4983 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4984 rtx subtarget, optab op_optab)
4986 rtx op0;
4988 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4989 return NULL_RTX;
4991 /* Compute the argument. */
4992 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4993 (subtarget
4994 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4995 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4996 VOIDmode, EXPAND_NORMAL);
4997 /* Compute op, into TARGET if possible.
4998 Set TARGET to wherever the result comes back. */
4999 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5000 op_optab, op0, target, op_optab != clrsb_optab);
5001 gcc_assert (target);
5003 return convert_to_mode (target_mode, target, 0);
5006 /* Expand a call to __builtin_expect. We just return our argument
5007 as the builtin_expect semantics should already have been handled by
5008 the tree branch prediction pass. */
5010 static rtx
5011 expand_builtin_expect (tree exp, rtx target)
5013 tree arg;
5015 if (call_expr_nargs (exp) < 2)
5016 return const0_rtx;
5017 arg = CALL_EXPR_ARG (exp, 0);
5019 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5020 /* When guessing was done, the hints should be already stripped away. */
5021 gcc_assert (!flag_guess_branch_prob
5022 || optimize == 0 || seen_error ());
5023 return target;
5026 /* Expand a call to __builtin_expect_with_probability. We just return our
5027 argument as the builtin_expect semantics should already have been handled
5028 by the tree branch prediction pass. */
5030 static rtx
5031 expand_builtin_expect_with_probability (tree exp, rtx target)
5033 tree arg;
5035 if (call_expr_nargs (exp) < 3)
5036 return const0_rtx;
5037 arg = CALL_EXPR_ARG (exp, 0);
5039 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5040 /* When guessing was done, the hints should be already stripped away. */
5041 gcc_assert (!flag_guess_branch_prob
5042 || optimize == 0 || seen_error ());
5043 return target;
5047 /* Expand a call to __builtin_assume_aligned. We just return our first
5048 argument as the builtin_assume_aligned semantics should already have been
5049 handled by CCP. */
5051 static rtx
5052 expand_builtin_assume_aligned (tree exp, rtx target)
5054 if (call_expr_nargs (exp) < 2)
5055 return const0_rtx;
5056 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5057 EXPAND_NORMAL);
5058 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5059 && (call_expr_nargs (exp) < 3
5060 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5061 return target;
5064 void
5065 expand_builtin_trap (void)
5067 if (targetm.have_trap ())
5069 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5070 /* For trap insns, when not accumulating outgoing args, force a
5071 REG_ARGS_SIZE note to prevent crossjumping of calls with
5072 different arg sizes. */
5073 if (!ACCUMULATE_OUTGOING_ARGS)
5074 add_args_size_note (insn, stack_pointer_delta);
5076 else
5078 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5079 tree call_expr = build_call_expr (fn, 0);
5080 expand_call (call_expr, NULL_RTX, false);
5083 emit_barrier ();
5086 /* Expand a call to __builtin_unreachable. We do nothing except emit
5087 a barrier saying that control flow will not pass here.
5089 It is the responsibility of the program being compiled to ensure
5090 that control flow never reaches __builtin_unreachable. */
5091 static void
5092 expand_builtin_unreachable (void)
5094 emit_barrier ();
5097 /* Expand EXP, a call to fabs, fabsf or fabsl.
5098 Return NULL_RTX if a normal call should be emitted rather than expanding
5099 the function inline. If convenient, the result should be placed
5100 in TARGET. SUBTARGET may be used as the target for computing
5101 the operand. */
5103 static rtx
5104 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5106 machine_mode mode;
5107 tree arg;
5108 rtx op0;
5110 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5111 return NULL_RTX;
5113 arg = CALL_EXPR_ARG (exp, 0);
5114 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5115 mode = TYPE_MODE (TREE_TYPE (arg));
5116 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5117 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5120 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5121 Return NULL if a normal call should be emitted rather than expanding the
5122 function inline. If convenient, the result should be placed in TARGET.
5123 SUBTARGET may be used as the target for computing the operand. */
5125 static rtx
5126 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5128 rtx op0, op1;
5129 tree arg;
5131 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5132 return NULL_RTX;
5134 arg = CALL_EXPR_ARG (exp, 0);
5135 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5137 arg = CALL_EXPR_ARG (exp, 1);
5138 op1 = expand_normal (arg);
5140 return expand_copysign (op0, op1, target);
5143 /* Emit a call to __builtin___clear_cache. */
5145 void
5146 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5148 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5149 BUILTIN_ASM_NAME_PTR
5150 (BUILT_IN_CLEAR_CACHE));
5152 emit_library_call (callee,
5153 LCT_NORMAL, VOIDmode,
5154 convert_memory_address (ptr_mode, begin), ptr_mode,
5155 convert_memory_address (ptr_mode, end), ptr_mode);
5158 /* Emit a call to __builtin___clear_cache, unless the target specifies
5159 it as do-nothing. This function can be used by trampoline
5160 finalizers to duplicate the effects of expanding a call to the
5161 clear_cache builtin. */
5163 void
5164 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5166 if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
5167 || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
5169 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5170 return;
5173 if (targetm.have_clear_cache ())
5175 /* We have a "clear_cache" insn, and it will handle everything. */
5176 class expand_operand ops[2];
5178 create_address_operand (&ops[0], begin);
5179 create_address_operand (&ops[1], end);
5181 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5182 return;
5184 else
5186 #ifndef CLEAR_INSN_CACHE
5187 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5188 does nothing. There is no need to call it. Do nothing. */
5189 return;
5190 #endif /* CLEAR_INSN_CACHE */
5193 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5196 /* Expand a call to __builtin___clear_cache. */
5198 static void
5199 expand_builtin___clear_cache (tree exp)
5201 tree begin, end;
5202 rtx begin_rtx, end_rtx;
5204 /* We must not expand to a library call. If we did, any
5205 fallback library function in libgcc that might contain a call to
5206 __builtin___clear_cache() would recurse infinitely. */
5207 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5209 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5210 return;
5213 begin = CALL_EXPR_ARG (exp, 0);
5214 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5216 end = CALL_EXPR_ARG (exp, 1);
5217 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5219 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5222 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5224 static rtx
5225 round_trampoline_addr (rtx tramp)
5227 rtx temp, addend, mask;
5229 /* If we don't need too much alignment, we'll have been guaranteed
5230 proper alignment by get_trampoline_type. */
5231 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5232 return tramp;
5234 /* Round address up to desired boundary. */
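/* A small worked example (alignment assumed): with an 8-byte
   TRAMPOLINE_ALIGNMENT, ADDEND = 7 and MASK = -8, so the two operations
   below compute tramp = (tramp + 7) & -8, rounding up to the next
   multiple of 8.  */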
5235 temp = gen_reg_rtx (Pmode);
5236 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5237 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5239 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5240 temp, 0, OPTAB_LIB_WIDEN);
5241 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5242 temp, 0, OPTAB_LIB_WIDEN);
5244 return tramp;
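/* Worked example (not from the original source): the two binops above
   compute the usual round-up-to-alignment formula

     tramp = (tramp + align - 1) & -align        with align in bytes,

   so with a TRAMPOLINE_ALIGNMENT of 64 bits (align == 8) an address of
   0x1003 becomes (0x1003 + 7) & ~7 == 0x1008.  */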
5247 static rtx
5248 expand_builtin_init_trampoline (tree exp, bool onstack)
5250 tree t_tramp, t_func, t_chain;
5251 rtx m_tramp, r_tramp, r_chain, tmp;
5253 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5254 POINTER_TYPE, VOID_TYPE))
5255 return NULL_RTX;
5257 t_tramp = CALL_EXPR_ARG (exp, 0);
5258 t_func = CALL_EXPR_ARG (exp, 1);
5259 t_chain = CALL_EXPR_ARG (exp, 2);
5261 r_tramp = expand_normal (t_tramp);
5262 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5263 MEM_NOTRAP_P (m_tramp) = 1;
5265 /* If ONSTACK, the TRAMP argument should be the address of a field
5266 within the local function's FRAME decl. Either way, let's see if
5267 we can fill in the MEM_ATTRs for this memory. */
5268 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5269 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5271 /* Creator of a heap trampoline is responsible for making sure the
5272 address is aligned to at least STACK_BOUNDARY. Normally malloc
5273 will ensure this anyhow. */
5274 tmp = round_trampoline_addr (r_tramp);
5275 if (tmp != r_tramp)
5277 m_tramp = change_address (m_tramp, BLKmode, tmp);
5278 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5279 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5282 /* The FUNC argument should be the address of the nested function.
5283 Extract the actual function decl to pass to the hook. */
5284 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5285 t_func = TREE_OPERAND (t_func, 0);
5286 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5288 r_chain = expand_normal (t_chain);
5290 /* Generate insns to initialize the trampoline. */
5291 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5293 if (onstack)
5295 trampolines_created = 1;
5297 if (targetm.calls.custom_function_descriptors != 0)
5298 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5299 "trampoline generated for nested function %qD", t_func);
5302 return const0_rtx;
5305 static rtx
5306 expand_builtin_adjust_trampoline (tree exp)
5308 rtx tramp;
5310 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5311 return NULL_RTX;
5313 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5314 tramp = round_trampoline_addr (tramp);
5315 if (targetm.calls.trampoline_adjust_address)
5316 tramp = targetm.calls.trampoline_adjust_address (tramp);
5318 return tramp;
5321 /* Expand a call to the builtin descriptor initialization routine.
5322 A descriptor is made up of a couple of pointers to the static
5323 chain and the code entry in this order. */
5325 static rtx
5326 expand_builtin_init_descriptor (tree exp)
5328 tree t_descr, t_func, t_chain;
5329 rtx m_descr, r_descr, r_func, r_chain;
5331 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5332 VOID_TYPE))
5333 return NULL_RTX;
5335 t_descr = CALL_EXPR_ARG (exp, 0);
5336 t_func = CALL_EXPR_ARG (exp, 1);
5337 t_chain = CALL_EXPR_ARG (exp, 2);
5339 r_descr = expand_normal (t_descr);
5340 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5341 MEM_NOTRAP_P (m_descr) = 1;
5342 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5344 r_func = expand_normal (t_func);
5345 r_chain = expand_normal (t_chain);
5347 /* Generate insns to initialize the descriptor. */
5348 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5349 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5350 POINTER_SIZE / BITS_PER_UNIT), r_func);
5352 return const0_rtx;
5355 /* Expand a call to the builtin descriptor adjustment routine. */
5357 static rtx
5358 expand_builtin_adjust_descriptor (tree exp)
5360 rtx tramp;
5362 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5363 return NULL_RTX;
5365 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5367 /* Unalign the descriptor to allow runtime identification. */
5368 tramp = plus_constant (ptr_mode, tramp,
5369 targetm.calls.custom_function_descriptors);
5371 return force_operand (tramp, NULL_RTX);
5374 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5375 function. The function first checks whether the back end provides
5376 an insn to implement signbit for the respective mode. If not, it
5377 checks whether the floating point format of the value is such that
5378 the sign bit can be extracted. If that is not the case, error out.
5379 EXP is the expression that is a call to the builtin function; if
5380 convenient, the result should be placed in TARGET. */
5381 static rtx
5382 expand_builtin_signbit (tree exp, rtx target)
5384 const struct real_format *fmt;
5385 scalar_float_mode fmode;
5386 scalar_int_mode rmode, imode;
5387 tree arg;
5388 int word, bitpos;
5389 enum insn_code icode;
5390 rtx temp;
5391 location_t loc = EXPR_LOCATION (exp);
5393 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5394 return NULL_RTX;
5396 arg = CALL_EXPR_ARG (exp, 0);
5397 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5398 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5399 fmt = REAL_MODE_FORMAT (fmode);
5401 arg = builtin_save_expr (arg);
5403 /* Expand the argument yielding a RTX expression. */
5404 temp = expand_normal (arg);
5406 /* Check if the back end provides an insn that handles signbit for the
5407 argument's mode. */
5408 icode = optab_handler (signbit_optab, fmode);
5409 if (icode != CODE_FOR_nothing)
5411 rtx_insn *last = get_last_insn ();
5412 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5413 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5414 return target;
5415 delete_insns_since (last);
5418 /* For floating point formats without a sign bit, implement signbit
5419 as "ARG < 0.0". */
5420 bitpos = fmt->signbit_ro;
5421 if (bitpos < 0)
5423 /* But we can't do this if the format supports signed zero. */
5424 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5426 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5427 build_real (TREE_TYPE (arg), dconst0));
5428 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5431 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5433 imode = int_mode_for_mode (fmode).require ();
5434 temp = gen_lowpart (imode, temp);
5436 else
5438 imode = word_mode;
5439 /* Handle targets with different FP word orders. */
5440 if (FLOAT_WORDS_BIG_ENDIAN)
5441 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5442 else
5443 word = bitpos / BITS_PER_WORD;
5444 temp = operand_subword_force (temp, word, fmode);
5445 bitpos = bitpos % BITS_PER_WORD;
5448 /* Force the intermediate word_mode (or narrower) result into a
5449 register. This avoids attempting to create paradoxical SUBREGs
5450 of floating point modes below. */
5451 temp = force_reg (imode, temp);
5453 /* If the bitpos is within the "result mode" lowpart, the operation
5454 can be implemented with a single bitwise AND. Otherwise, we need
5455 a right shift and an AND. */
5457 if (bitpos < GET_MODE_BITSIZE (rmode))
5459 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5461 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5462 temp = gen_lowpart (rmode, temp);
5463 temp = expand_binop (rmode, and_optab, temp,
5464 immed_wide_int_const (mask, rmode),
5465 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5467 else
5469 /* Perform a logical right shift to place the signbit in the least
5470 significant bit, then truncate the result to the desired mode
5471 and mask just this bit. */
5472 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5473 temp = gen_lowpart (rmode, temp);
5474 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5475 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5478 return temp;
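/* Illustrative note (not part of the original source): for IEEE single
   precision the sign is bit 31, so with a 32-bit result mode the AND path
   above yields roughly

     signbit (x)  ->  bits_of (x) & 0x80000000

   while for IEEE double on a target with 64-bit words and a 32-bit int
   result the sign bit (bit 63) lies outside the result mode, so the
   shift-and-mask path is used instead:

     signbit (x)  ->  (bits_of (x) >> 63) & 1

   Both forms only need to produce zero/non-zero, not exactly 0 or 1, which
   is why the single-AND variant is acceptable.  */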
5481 /* Expand fork or exec calls. TARGET is the desired target of the
5482 call. EXP is the call. FN is the
5483 identifier of the actual function. IGNORE is nonzero if the
5484 value is to be ignored. */
5486 static rtx
5487 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5489 tree id, decl;
5490 tree call;
5492 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
5494 tree path = CALL_EXPR_ARG (exp, 0);
5495 /* Detect unterminated path. */
5496 if (!check_read_access (exp, path))
5497 return NULL_RTX;
5499 /* Also detect unterminated first argument. */
5500 switch (DECL_FUNCTION_CODE (fn))
5502 case BUILT_IN_EXECL:
5503 case BUILT_IN_EXECLE:
5504 case BUILT_IN_EXECLP:
5505 if (!check_read_access (exp, path))
5506 return NULL_RTX;
5507 default:
5508 break;
5513 /* If we are not profiling, just call the function. */
5514 if (!profile_arc_flag)
5515 return NULL_RTX;
5517 /* Otherwise call the wrapper. This should be equivalent for the rest of
5518 the compiler, so the code does not diverge, and the wrapper may run the
5519 code necessary for keeping the profiling sane. */
5521 switch (DECL_FUNCTION_CODE (fn))
5523 case BUILT_IN_FORK:
5524 id = get_identifier ("__gcov_fork");
5525 break;
5527 case BUILT_IN_EXECL:
5528 id = get_identifier ("__gcov_execl");
5529 break;
5531 case BUILT_IN_EXECV:
5532 id = get_identifier ("__gcov_execv");
5533 break;
5535 case BUILT_IN_EXECLP:
5536 id = get_identifier ("__gcov_execlp");
5537 break;
5539 case BUILT_IN_EXECLE:
5540 id = get_identifier ("__gcov_execle");
5541 break;
5543 case BUILT_IN_EXECVP:
5544 id = get_identifier ("__gcov_execvp");
5545 break;
5547 case BUILT_IN_EXECVE:
5548 id = get_identifier ("__gcov_execve");
5549 break;
5551 default:
5552 gcc_unreachable ();
5555 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5556 FUNCTION_DECL, id, TREE_TYPE (fn));
5557 DECL_EXTERNAL (decl) = 1;
5558 TREE_PUBLIC (decl) = 1;
5559 DECL_ARTIFICIAL (decl) = 1;
5560 TREE_NOTHROW (decl) = 1;
5561 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5562 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5563 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5564 return expand_call (call, target, ignore);
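/* Illustrative note (not from the original source): with -fprofile-arcs a
   plain call such as

     pid_t pid = fork ();

   is rewritten here into a call to the libgcov wrapper __gcov_fork, which
   runs the bookkeeping needed to keep the profile counters sane across the
   fork, as the comment above says; without profiling the NULL_RTX return
   simply lets the normal library call be emitted.  */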
5569 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5570 the pointer in these functions is void*, the tree optimizers may remove
5571 casts. The mode computed in expand_builtin isn't reliable either, due
5572 to __sync_bool_compare_and_swap.
5574 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5575 group of builtins. This gives us log2 of the mode size. */
5577 static inline machine_mode
5578 get_builtin_sync_mode (int fcode_diff)
5580 /* The size is not negotiable, so ask not to get BLKmode in return
5581 if the target indicates that a smaller size would be better. */
5582 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
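/* Worked example (not part of the original source): for
   BUILT_IN_SYNC_FETCH_AND_ADD_4 the caller passes
   fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so the mode requested is
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */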
5585 /* Expand the memory expression LOC and return the appropriate memory operand
5586 for the builtin_sync operations. */
5588 static rtx
5589 get_builtin_sync_mem (tree loc, machine_mode mode)
5591 rtx addr, mem;
5592 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5593 ? TREE_TYPE (TREE_TYPE (loc))
5594 : TREE_TYPE (loc));
5595 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5597 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5598 addr = convert_memory_address (addr_mode, addr);
5600 /* Note that we explicitly do not want any alias information for this
5601 memory, so that we kill all other live memories. Otherwise we don't
5602 satisfy the full barrier semantics of the intrinsic. */
5603 mem = gen_rtx_MEM (mode, addr);
5605 set_mem_addr_space (mem, addr_space);
5607 mem = validize_mem (mem);
5609 /* The alignment needs to be at least that required by the mode. */
5610 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5611 get_pointer_alignment (loc)));
5612 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5613 MEM_VOLATILE_P (mem) = 1;
5615 return mem;
5618 /* Make sure an argument is in the right mode.
5619 EXP is the tree argument.
5620 MODE is the mode it should be in. */
5622 static rtx
5623 expand_expr_force_mode (tree exp, machine_mode mode)
5625 rtx val;
5626 machine_mode old_mode;
5628 if (TREE_CODE (exp) == SSA_NAME
5629 && TYPE_MODE (TREE_TYPE (exp)) != mode)
5631 /* Undo argument promotion if possible, as combine might not
5632 be able to do it later due to MEM_VOLATILE_P uses in the
5633 patterns. */
5634 gimple *g = get_gimple_for_ssa_name (exp);
5635 if (g && gimple_assign_cast_p (g))
5637 tree rhs = gimple_assign_rhs1 (g);
5638 tree_code code = gimple_assign_rhs_code (g);
5639 if (CONVERT_EXPR_CODE_P (code)
5640 && TYPE_MODE (TREE_TYPE (rhs)) == mode
5641 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5642 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5643 && (TYPE_PRECISION (TREE_TYPE (exp))
5644 > TYPE_PRECISION (TREE_TYPE (rhs))))
5645 exp = rhs;
5649 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5650 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5651 of CONST_INTs, where we know the old_mode only from the call argument. */
5653 old_mode = GET_MODE (val);
5654 if (old_mode == VOIDmode)
5655 old_mode = TYPE_MODE (TREE_TYPE (exp));
5656 val = convert_modes (mode, old_mode, val, 1);
5657 return val;
5661 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5662 EXP is the CALL_EXPR. CODE is the rtx code
5663 that corresponds to the arithmetic or logical operation from the name;
5664 an exception here is that NOT actually means NAND. TARGET is an optional
5665 place for us to store the results; AFTER is true if this is the
5666 xxx_and_fetch form, i.e. the value after the operation is returned. */
5668 static rtx
5669 expand_builtin_sync_operation (machine_mode mode, tree exp,
5670 enum rtx_code code, bool after,
5671 rtx target)
5673 rtx val, mem;
5674 location_t loc = EXPR_LOCATION (exp);
5676 if (code == NOT && warn_sync_nand)
5678 tree fndecl = get_callee_fndecl (exp);
5679 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5681 static bool warned_f_a_n, warned_n_a_f;
5683 switch (fcode)
5685 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5686 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5687 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5688 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5689 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5690 if (warned_f_a_n)
5691 break;
5693 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5694 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5695 warned_f_a_n = true;
5696 break;
5698 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5699 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5700 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5701 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5702 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5703 if (warned_n_a_f)
5704 break;
5706 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5707 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5708 warned_n_a_f = true;
5709 break;
5711 default:
5712 gcc_unreachable ();
5716 /* Expand the operands. */
5717 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5718 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5720 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5721 after);
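/* Illustrative note (not from the original source): the AFTER flag
   distinguishes the two user-visible families, e.g.

     old = __sync_fetch_and_add (&v, 1);    AFTER == false, returns old value
     new = __sync_add_and_fetch (&v, 1);    AFTER == true,  returns new value

   and __sync_fetch_and_nand / __sync_nand_and_fetch reach here with CODE
   set to NOT, which is why the warning above mentions the GCC 4.4 change
   of NAND semantics.  */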
5724 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5725 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5726 true if this is the boolean form. TARGET is a place for us to store the
5727 results; this is NOT optional if IS_BOOL is true. */
5729 static rtx
5730 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5731 bool is_bool, rtx target)
5733 rtx old_val, new_val, mem;
5734 rtx *pbool, *poval;
5736 /* Expand the operands. */
5737 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5738 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5739 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5741 pbool = poval = NULL;
5742 if (target != const0_rtx)
5744 if (is_bool)
5745 pbool = &target;
5746 else
5747 poval = &target;
5749 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5750 false, MEMMODEL_SYNC_SEQ_CST,
5751 MEMMODEL_SYNC_SEQ_CST))
5752 return NULL_RTX;
5754 return target;
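/* Illustrative note (not part of the original source): the two forms map
   onto the PBOOL/POVAL outputs above, e.g.

     ok  = __sync_bool_compare_and_swap (&v, oldv, newv);   result in PBOOL
     old = __sync_val_compare_and_swap  (&v, oldv, newv);   result in POVAL

   both with full (SEQ_CST) barrier semantics.  */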
5757 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5758 general form is actually an atomic exchange, and some targets only
5759 support a reduced form with the second argument being a constant 1.
5760 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5761 the results. */
5763 static rtx
5764 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5765 rtx target)
5767 rtx val, mem;
5769 /* Expand the operands. */
5770 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5771 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5773 return expand_sync_lock_test_and_set (target, mem, val);
5776 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5778 static void
5779 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5781 rtx mem;
5783 /* Expand the operands. */
5784 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5786 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5789 /* Given an integer representing an ``enum memmodel'', verify its
5790 correctness and return the memory model enum. */
5792 static enum memmodel
5793 get_memmodel (tree exp)
5795 rtx op;
5796 unsigned HOST_WIDE_INT val;
5797 location_t loc
5798 = expansion_point_location_if_in_system_header (input_location);
5800 /* If the parameter is not a constant, it's a run time value so we'll just
5801 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5802 if (TREE_CODE (exp) != INTEGER_CST)
5803 return MEMMODEL_SEQ_CST;
5805 op = expand_normal (exp);
5807 val = INTVAL (op);
5808 if (targetm.memmodel_check)
5809 val = targetm.memmodel_check (val);
5810 else if (val & ~MEMMODEL_MASK)
5812 warning_at (loc, OPT_Winvalid_memory_model,
5813 "unknown architecture specifier in memory model to builtin");
5814 return MEMMODEL_SEQ_CST;
5817 /* Should never see a user-supplied explicit SYNC memory model, so >= LAST works. */
5818 if (memmodel_base (val) >= MEMMODEL_LAST)
5820 warning_at (loc, OPT_Winvalid_memory_model,
5821 "invalid memory model argument to builtin");
5822 return MEMMODEL_SEQ_CST;
5825 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5826 be conservative and promote consume to acquire. */
5827 if (val == MEMMODEL_CONSUME)
5828 val = MEMMODEL_ACQUIRE;
5830 return (enum memmodel) val;
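/* Illustrative note (not from the original source): for a call such as
   __atomic_load_n (&v, __ATOMIC_CONSUME) this returns MEMMODEL_ACQUIRE
   because of the consume promotion above, and any argument that is not a
   compile-time constant (or that is out of range) is conservatively
   treated as MEMMODEL_SEQ_CST.  */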
5833 /* Expand the __atomic_exchange intrinsic:
5834 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5835 EXP is the CALL_EXPR.
5836 TARGET is an optional place for us to store the results. */
5838 static rtx
5839 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5841 rtx val, mem;
5842 enum memmodel model;
5844 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5846 if (!flag_inline_atomics)
5847 return NULL_RTX;
5849 /* Expand the operands. */
5850 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5851 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5853 return expand_atomic_exchange (target, mem, val, model);
5856 /* Expand the __atomic_compare_exchange intrinsic:
5857 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5858 TYPE desired, BOOL weak,
5859 enum memmodel success,
5860 enum memmodel failure)
5861 EXP is the CALL_EXPR.
5862 TARGET is an optional place for us to store the results. */
5864 static rtx
5865 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5866 rtx target)
5868 rtx expect, desired, mem, oldval;
5869 rtx_code_label *label;
5870 enum memmodel success, failure;
5871 tree weak;
5872 bool is_weak;
5873 location_t loc
5874 = expansion_point_location_if_in_system_header (input_location);
5876 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5877 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5879 if (failure > success)
5881 warning_at (loc, OPT_Winvalid_memory_model,
5882 "failure memory model cannot be stronger than success "
5883 "memory model for %<__atomic_compare_exchange%>");
5884 success = MEMMODEL_SEQ_CST;
5887 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5889 warning_at (loc, OPT_Winvalid_memory_model,
5890 "invalid failure memory model for "
5891 "%<__atomic_compare_exchange%>");
5892 failure = MEMMODEL_SEQ_CST;
5893 success = MEMMODEL_SEQ_CST;
5897 if (!flag_inline_atomics)
5898 return NULL_RTX;
5900 /* Expand the operands. */
5901 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5903 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5904 expect = convert_memory_address (Pmode, expect);
5905 expect = gen_rtx_MEM (mode, expect);
5906 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5908 weak = CALL_EXPR_ARG (exp, 3);
5909 is_weak = false;
5910 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5911 is_weak = true;
5913 if (target == const0_rtx)
5914 target = NULL;
5916 /* Lest the rtl backend create a race condition with an improper store
5917 to memory, always create a new pseudo for OLDVAL. */
5918 oldval = NULL;
5920 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5921 is_weak, success, failure))
5922 return NULL_RTX;
5924 /* Conditionally store back to EXPECT, lest we create a race condition
5925 with an improper store to memory. */
5926 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5927 the normal case where EXPECT is totally private, i.e. a register. At
5928 which point the store can be unconditional. */
5929 label = gen_label_rtx ();
5930 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5931 GET_MODE (target), 1, label);
5932 emit_move_insn (expect, oldval);
5933 emit_label (label);
5935 return target;
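/* Illustrative sketch (not part of the original source) of the source-level
   form handled here:

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&v, &expected, 1, 0,
                                            __ATOMIC_ACQ_REL,
                                            __ATOMIC_ACQUIRE);

   On failure the compare-and-jump sequence above stores the value actually
   observed in the object back into 'expected', as the C/C++ interface
   requires.  */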
5938 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5939 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5940 call. The weak parameter must be dropped to match the expected parameter
5941 list and the expected argument changed from value to pointer to memory
5942 slot. */
5944 static void
5945 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5947 unsigned int z;
5948 vec<tree, va_gc> *vec;
5950 vec_alloc (vec, 5);
5951 vec->quick_push (gimple_call_arg (call, 0));
5952 tree expected = gimple_call_arg (call, 1);
5953 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5954 TREE_TYPE (expected));
5955 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5956 if (expd != x)
5957 emit_move_insn (x, expd);
5958 tree v = make_tree (TREE_TYPE (expected), x);
5959 vec->quick_push (build1 (ADDR_EXPR,
5960 build_pointer_type (TREE_TYPE (expected)), v));
5961 vec->quick_push (gimple_call_arg (call, 2));
5962 /* Skip the boolean weak parameter. */
5963 for (z = 4; z < 6; z++)
5964 vec->quick_push (gimple_call_arg (call, z));
5965 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5966 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5967 gcc_assert (bytes_log2 < 5);
5968 built_in_function fncode
5969 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5970 + bytes_log2);
5971 tree fndecl = builtin_decl_explicit (fncode);
5972 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5973 fndecl);
5974 tree exp = build_call_vec (boolean_type_node, fn, vec);
5975 tree lhs = gimple_call_lhs (call);
5976 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5977 if (lhs)
5979 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5980 if (GET_MODE (boolret) != mode)
5981 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5982 x = force_reg (mode, x);
5983 write_complex_part (target, boolret, true);
5984 write_complex_part (target, x, false);
5988 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5990 void
5991 expand_ifn_atomic_compare_exchange (gcall *call)
5993 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5994 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5995 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5996 rtx expect, desired, mem, oldval, boolret;
5997 enum memmodel success, failure;
5998 tree lhs;
5999 bool is_weak;
6000 location_t loc
6001 = expansion_point_location_if_in_system_header (gimple_location (call));
6003 success = get_memmodel (gimple_call_arg (call, 4));
6004 failure = get_memmodel (gimple_call_arg (call, 5));
6006 if (failure > success)
6008 warning_at (loc, OPT_Winvalid_memory_model,
6009 "failure memory model cannot be stronger than success "
6010 "memory model for %<__atomic_compare_exchange%>");
6011 success = MEMMODEL_SEQ_CST;
6014 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6016 warning_at (loc, OPT_Winvalid_memory_model,
6017 "invalid failure memory model for "
6018 "%<__atomic_compare_exchange%>");
6019 failure = MEMMODEL_SEQ_CST;
6020 success = MEMMODEL_SEQ_CST;
6023 if (!flag_inline_atomics)
6025 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6026 return;
6029 /* Expand the operands. */
6030 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6032 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6033 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6035 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6037 boolret = NULL;
6038 oldval = NULL;
6040 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6041 is_weak, success, failure))
6043 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6044 return;
6047 lhs = gimple_call_lhs (call);
6048 if (lhs)
6050 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6051 if (GET_MODE (boolret) != mode)
6052 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6053 write_complex_part (target, boolret, true);
6054 write_complex_part (target, oldval, false);
6058 /* Expand the __atomic_load intrinsic:
6059 TYPE __atomic_load (TYPE *object, enum memmodel)
6060 EXP is the CALL_EXPR.
6061 TARGET is an optional place for us to store the results. */
6063 static rtx
6064 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6066 rtx mem;
6067 enum memmodel model;
6069 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6070 if (is_mm_release (model) || is_mm_acq_rel (model))
6072 location_t loc
6073 = expansion_point_location_if_in_system_header (input_location);
6074 warning_at (loc, OPT_Winvalid_memory_model,
6075 "invalid memory model for %<__atomic_load%>");
6076 model = MEMMODEL_SEQ_CST;
6079 if (!flag_inline_atomics)
6080 return NULL_RTX;
6082 /* Expand the operand. */
6083 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6085 return expand_atomic_load (target, mem, model);
6089 /* Expand the __atomic_store intrinsic:
6090 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6091 EXP is the CALL_EXPR.
6092 TARGET is an optional place for us to store the results. */
6094 static rtx
6095 expand_builtin_atomic_store (machine_mode mode, tree exp)
6097 rtx mem, val;
6098 enum memmodel model;
6100 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6101 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6102 || is_mm_release (model)))
6104 location_t loc
6105 = expansion_point_location_if_in_system_header (input_location);
6106 warning_at (loc, OPT_Winvalid_memory_model,
6107 "invalid memory model for %<__atomic_store%>");
6108 model = MEMMODEL_SEQ_CST;
6111 if (!flag_inline_atomics)
6112 return NULL_RTX;
6114 /* Expand the operands. */
6115 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6116 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6118 return expand_atomic_store (mem, val, model, false);
6121 /* Expand the __atomic_fetch_XXX intrinsic:
6122 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6123 EXP is the CALL_EXPR.
6124 TARGET is an optional place for us to store the results.
6125 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6126 FETCH_AFTER is true if returning the result of the operation.
6127 FETCH_AFTER is false if returning the value before the operation.
6128 IGNORE is true if the result is not used.
6129 EXT_CALL is the correct builtin for an external call if this cannot be
6130 resolved to an instruction sequence. */
6132 static rtx
6133 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6134 enum rtx_code code, bool fetch_after,
6135 bool ignore, enum built_in_function ext_call)
6137 rtx val, mem, ret;
6138 enum memmodel model;
6139 tree fndecl;
6140 tree addr;
6142 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6144 /* Expand the operands. */
6145 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6146 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6148 /* Only try generating instructions if inlining is turned on. */
6149 if (flag_inline_atomics)
6151 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6152 if (ret)
6153 return ret;
6156 /* Return if a different routine isn't needed for the library call. */
6157 if (ext_call == BUILT_IN_NONE)
6158 return NULL_RTX;
6160 /* Change the call to the specified function. */
6161 fndecl = get_callee_fndecl (exp);
6162 addr = CALL_EXPR_FN (exp);
6163 STRIP_NOPS (addr);
6165 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6166 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6168 /* If we will emit code after the call, the call cannot be a tail call.
6169 If it is emitted as a tail call, a barrier is emitted after it, and
6170 then all trailing code is removed. */
6171 if (!ignore)
6172 CALL_EXPR_TAILCALL (exp) = 0;
6174 /* Expand the call here so we can emit trailing code. */
6175 ret = expand_call (exp, target, ignore);
6177 /* Replace the original function just in case it matters. */
6178 TREE_OPERAND (addr, 0) = fndecl;
6180 /* Then issue the arithmetic correction to return the right result. */
6181 if (!ignore)
6183 if (code == NOT)
6185 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6186 OPTAB_LIB_WIDEN);
6187 ret = expand_simple_unop (mode, NOT, ret, target, true);
6189 else
6190 ret = expand_simple_binop (mode, code, ret, val, target, true,
6191 OPTAB_LIB_WIDEN);
6193 return ret;
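/* Worked example (not from the original source): if __atomic_add_fetch_4
   cannot be expanded inline, EXT_CALL names __atomic_fetch_add_4, which
   returns the value before the addition, so the correction above computes
   ret = ret + val; likewise for NAND the library's fetched value is fixed
   up as ret = ~(ret & val).  */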
6196 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6198 void
6199 expand_ifn_atomic_bit_test_and (gcall *call)
6201 tree ptr = gimple_call_arg (call, 0);
6202 tree bit = gimple_call_arg (call, 1);
6203 tree flag = gimple_call_arg (call, 2);
6204 tree lhs = gimple_call_lhs (call);
6205 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6206 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6207 enum rtx_code code;
6208 optab optab;
6209 class expand_operand ops[5];
6211 gcc_assert (flag_inline_atomics);
6213 if (gimple_call_num_args (call) == 4)
6214 model = get_memmodel (gimple_call_arg (call, 3));
6216 rtx mem = get_builtin_sync_mem (ptr, mode);
6217 rtx val = expand_expr_force_mode (bit, mode);
6219 switch (gimple_call_internal_fn (call))
6221 case IFN_ATOMIC_BIT_TEST_AND_SET:
6222 code = IOR;
6223 optab = atomic_bit_test_and_set_optab;
6224 break;
6225 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6226 code = XOR;
6227 optab = atomic_bit_test_and_complement_optab;
6228 break;
6229 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6230 code = AND;
6231 optab = atomic_bit_test_and_reset_optab;
6232 break;
6233 default:
6234 gcc_unreachable ();
6237 if (lhs == NULL_TREE)
6239 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6240 val, NULL_RTX, true, OPTAB_DIRECT);
6241 if (code == AND)
6242 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6243 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6244 return;
6247 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6248 enum insn_code icode = direct_optab_handler (optab, mode);
6249 gcc_assert (icode != CODE_FOR_nothing);
6250 create_output_operand (&ops[0], target, mode);
6251 create_fixed_operand (&ops[1], mem);
6252 create_convert_operand_to (&ops[2], val, mode, true);
6253 create_integer_operand (&ops[3], model);
6254 create_integer_operand (&ops[4], integer_onep (flag));
6255 if (maybe_expand_insn (icode, 5, ops))
6256 return;
6258 rtx bitval = val;
6259 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6260 val, NULL_RTX, true, OPTAB_DIRECT);
6261 rtx maskval = val;
6262 if (code == AND)
6263 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6264 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6265 code, model, false);
6266 if (integer_onep (flag))
6268 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6269 NULL_RTX, true, OPTAB_DIRECT);
6270 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6271 true, OPTAB_DIRECT);
6273 else
6274 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6275 OPTAB_DIRECT);
6276 if (result != target)
6277 emit_move_insn (target, result);
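/* Illustrative note (not from the original source): these internal
   functions are typically created by earlier gimple passes from patterns
   such as

     if (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
         & (1u << bit))
       ...

   so that a target atomic_bit_test_and_set pattern can be used; when the
   optab is missing, the fallback above redoes the full fetch_op and then
   shifts or masks out the tested bit.  */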
6280 /* Expand an atomic clear operation.
6281 void __atomic_clear (BOOL *obj, enum memmodel)
6282 EXP is the call expression. */
6284 static rtx
6285 expand_builtin_atomic_clear (tree exp)
6287 machine_mode mode;
6288 rtx mem, ret;
6289 enum memmodel model;
6291 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6292 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6293 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6295 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6297 location_t loc
6298 = expansion_point_location_if_in_system_header (input_location);
6299 warning_at (loc, OPT_Winvalid_memory_model,
6300 "invalid memory model for %<__atomic_store%>");
6301 model = MEMMODEL_SEQ_CST;
6304 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6305 Failing that, a store is issued by __atomic_store. The only way this can
6306 fail is if the bool type is larger than a word size. Unlikely, but
6307 handle it anyway for completeness. Assume a single threaded model since
6308 there is no atomic support in this case, and no barriers are required. */
6309 ret = expand_atomic_store (mem, const0_rtx, model, true);
6310 if (!ret)
6311 emit_move_insn (mem, const0_rtx);
6312 return const0_rtx;
6315 /* Expand an atomic test_and_set operation.
6316 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6317 EXP is the call expression. */
6319 static rtx
6320 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6322 rtx mem;
6323 enum memmodel model;
6324 machine_mode mode;
6326 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6327 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6328 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6330 return expand_atomic_test_and_set (target, mem, model);
6334 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6335 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6337 static tree
6338 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6340 int size;
6341 machine_mode mode;
6342 unsigned int mode_align, type_align;
6344 if (TREE_CODE (arg0) != INTEGER_CST)
6345 return NULL_TREE;
6347 /* We need a corresponding integer mode for the access to be lock-free. */
6348 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6349 if (!int_mode_for_size (size, 0).exists (&mode))
6350 return boolean_false_node;
6352 mode_align = GET_MODE_ALIGNMENT (mode);
6354 if (TREE_CODE (arg1) == INTEGER_CST)
6356 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6358 /* Either this argument is null, or it's a fake pointer encoding
6359 the alignment of the object. */
6360 val = least_bit_hwi (val);
6361 val *= BITS_PER_UNIT;
6363 if (val == 0 || mode_align < val)
6364 type_align = mode_align;
6365 else
6366 type_align = val;
6368 else
6370 tree ttype = TREE_TYPE (arg1);
6372 /* This function is usually invoked and folded immediately by the front
6373 end before anything else has a chance to look at it. The pointer
6374 parameter at this point is usually cast to a void *, so check for that
6375 and look past the cast. */
6376 if (CONVERT_EXPR_P (arg1)
6377 && POINTER_TYPE_P (ttype)
6378 && VOID_TYPE_P (TREE_TYPE (ttype))
6379 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6380 arg1 = TREE_OPERAND (arg1, 0);
6382 ttype = TREE_TYPE (arg1);
6383 gcc_assert (POINTER_TYPE_P (ttype));
6385 /* Get the underlying type of the object. */
6386 ttype = TREE_TYPE (ttype);
6387 type_align = TYPE_ALIGN (ttype);
6390 /* If the object has smaller alignment, the lock free routines cannot
6391 be used. */
6392 if (type_align < mode_align)
6393 return boolean_false_node;
6395 /* Check if a compare_and_swap pattern exists for the mode which represents
6396 the required size. The pattern is not allowed to fail, so the existence
6397 of the pattern indicates support is present. Also require that an
6398 atomic load exists for the required size. */
6399 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6400 return boolean_true_node;
6401 else
6402 return boolean_false_node;
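/* Illustrative note (not part of the original source): on a target with a
   32-bit compare-and-swap and atomic load,

     __atomic_always_lock_free (4, 0)

   folds to true here, while passing a pointer to an object whose type is
   less aligned than the corresponding integer mode makes it fold to
   false.  */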
6405 /* Return true if the parameters to call EXP represent an object which will
6406 always generate lock free instructions. The first argument represents the
6407 size of the object, and the second parameter is a pointer to the object
6408 itself. If NULL is passed for the object, then the result is based on
6409 typical alignment for an object of the specified size. Otherwise return
6410 false. */
6412 static rtx
6413 expand_builtin_atomic_always_lock_free (tree exp)
6415 tree size;
6416 tree arg0 = CALL_EXPR_ARG (exp, 0);
6417 tree arg1 = CALL_EXPR_ARG (exp, 1);
6419 if (TREE_CODE (arg0) != INTEGER_CST)
6421 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6422 return const0_rtx;
6425 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6426 if (size == boolean_true_node)
6427 return const1_rtx;
6428 return const0_rtx;
6431 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6432 is lock free on this architecture. */
6434 static tree
6435 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6437 if (!flag_inline_atomics)
6438 return NULL_TREE;
6440 /* If it isn't always lock free, don't generate a result. */
6441 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6442 return boolean_true_node;
6444 return NULL_TREE;
6447 /* Return true if the parameters to call EXP represent an object which will
6448 always generate lock free instructions. The first argument represents the
6449 size of the object, and the second parameter is a pointer to the object
6450 itself. If NULL is passed for the object, then the result is based on
6451 typical alignment for an object of the specified size. Otherwise return
6452 NULL.  */
6454 static rtx
6455 expand_builtin_atomic_is_lock_free (tree exp)
6457 tree size;
6458 tree arg0 = CALL_EXPR_ARG (exp, 0);
6459 tree arg1 = CALL_EXPR_ARG (exp, 1);
6461 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6463 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6464 return NULL_RTX;
6467 if (!flag_inline_atomics)
6468 return NULL_RTX;
6470 /* If the value is known at compile time, return the RTX for it. */
6471 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6472 if (size == boolean_true_node)
6473 return const1_rtx;
6475 return NULL_RTX;
6478 /* Expand the __atomic_thread_fence intrinsic:
6479 void __atomic_thread_fence (enum memmodel)
6480 EXP is the CALL_EXPR. */
6482 static void
6483 expand_builtin_atomic_thread_fence (tree exp)
6485 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6486 expand_mem_thread_fence (model);
6489 /* Expand the __atomic_signal_fence intrinsic:
6490 void __atomic_signal_fence (enum memmodel)
6491 EXP is the CALL_EXPR. */
6493 static void
6494 expand_builtin_atomic_signal_fence (tree exp)
6496 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6497 expand_mem_signal_fence (model);
6500 /* Expand the __sync_synchronize intrinsic. */
6502 static void
6503 expand_builtin_sync_synchronize (void)
6505 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6508 static rtx
6509 expand_builtin_thread_pointer (tree exp, rtx target)
6511 enum insn_code icode;
6512 if (!validate_arglist (exp, VOID_TYPE))
6513 return const0_rtx;
6514 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6515 if (icode != CODE_FOR_nothing)
6517 class expand_operand op;
6518 /* If the target is not suitable then create a new target. */
6519 if (target == NULL_RTX
6520 || !REG_P (target)
6521 || GET_MODE (target) != Pmode)
6522 target = gen_reg_rtx (Pmode);
6523 create_output_operand (&op, target, Pmode);
6524 expand_insn (icode, 1, &op);
6525 return target;
6527 error ("%<__builtin_thread_pointer%> is not supported on this target");
6528 return const0_rtx;
6531 static void
6532 expand_builtin_set_thread_pointer (tree exp)
6534 enum insn_code icode;
6535 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6536 return;
6537 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6538 if (icode != CODE_FOR_nothing)
6540 class expand_operand op;
6541 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6542 Pmode, EXPAND_NORMAL);
6543 create_input_operand (&op, val, Pmode);
6544 expand_insn (icode, 1, &op);
6545 return;
6547 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6551 /* Emit code to restore the current value of stack. */
6553 static void
6554 expand_stack_restore (tree var)
6556 rtx_insn *prev;
6557 rtx sa = expand_normal (var);
6559 sa = convert_memory_address (Pmode, sa);
6561 prev = get_last_insn ();
6562 emit_stack_restore (SAVE_BLOCK, sa);
6564 record_new_stack_level ();
6566 fixup_args_size_notes (prev, get_last_insn (), 0);
6569 /* Emit code to save the current value of stack. */
6571 static rtx
6572 expand_stack_save (void)
6574 rtx ret = NULL_RTX;
6576 emit_stack_save (SAVE_BLOCK, &ret);
6577 return ret;
6580 /* Emit code to get the OpenACC gang, worker, or vector id or size. */
6582 static rtx
6583 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6585 const char *name;
6586 rtx fallback_retval;
6587 rtx_insn *(*gen_fn) (rtx, rtx);
6588 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6590 case BUILT_IN_GOACC_PARLEVEL_ID:
6591 name = "__builtin_goacc_parlevel_id";
6592 fallback_retval = const0_rtx;
6593 gen_fn = targetm.gen_oacc_dim_pos;
6594 break;
6595 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6596 name = "__builtin_goacc_parlevel_size";
6597 fallback_retval = const1_rtx;
6598 gen_fn = targetm.gen_oacc_dim_size;
6599 break;
6600 default:
6601 gcc_unreachable ();
6604 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6606 error ("%qs only supported in OpenACC code", name);
6607 return const0_rtx;
6610 tree arg = CALL_EXPR_ARG (exp, 0);
6611 if (TREE_CODE (arg) != INTEGER_CST)
6613 error ("non-constant argument 0 to %qs", name);
6614 return const0_rtx;
6617 int dim = TREE_INT_CST_LOW (arg);
6618 switch (dim)
6620 case GOMP_DIM_GANG:
6621 case GOMP_DIM_WORKER:
6622 case GOMP_DIM_VECTOR:
6623 break;
6624 default:
6625 error ("illegal argument 0 to %qs", name);
6626 return const0_rtx;
6629 if (ignore)
6630 return target;
6632 if (target == NULL_RTX)
6633 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6635 if (!targetm.have_oacc_dim_size ())
6637 emit_move_insn (target, fallback_retval);
6638 return target;
6641 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6642 emit_insn (gen_fn (reg, GEN_INT (dim)));
6643 if (reg != target)
6644 emit_move_insn (target, reg);
6646 return target;
6649 /* Expand a string compare operation using a sequence of char comparisons
6650 to get rid of the calling overhead, with result going to TARGET if
6651 that's convenient.
6653 VAR_STR is the variable string source;
6654 CONST_STR is the constant string source;
6655 LENGTH is the number of chars to compare;
6656 CONST_STR_N indicates which source string is the constant string;
6657 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6659 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
6661 target = (int) (unsigned char) var_str[0]
6662 - (int) (unsigned char) const_str[0];
6663 if (target != 0)
6664 goto ne_label;
6666 target = (int) (unsigned char) var_str[length - 2]
6667 - (int) (unsigned char) const_str[length - 2];
6668 if (target != 0)
6669 goto ne_label;
6670 target = (int) (unsigned char) var_str[length - 1]
6671 - (int) (unsigned char) const_str[length - 1];
6672 ne_label:
6675 static rtx
6676 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6677 unsigned HOST_WIDE_INT length,
6678 int const_str_n, machine_mode mode)
6680 HOST_WIDE_INT offset = 0;
6681 rtx var_rtx_array
6682 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6683 rtx var_rtx = NULL_RTX;
6684 rtx const_rtx = NULL_RTX;
6685 rtx result = target ? target : gen_reg_rtx (mode);
6686 rtx_code_label *ne_label = gen_label_rtx ();
6687 tree unit_type_node = unsigned_char_type_node;
6688 scalar_int_mode unit_mode
6689 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6691 start_sequence ();
6693 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6695 var_rtx
6696 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6697 const_rtx = c_readstr (const_str + offset, unit_mode);
6698 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6699 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6701 op0 = convert_modes (mode, unit_mode, op0, 1);
6702 op1 = convert_modes (mode, unit_mode, op1, 1);
6703 result = expand_simple_binop (mode, MINUS, op0, op1,
6704 result, 1, OPTAB_WIDEN);
6705 if (i < length - 1)
6706 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6707 mode, true, ne_label);
6708 offset += GET_MODE_SIZE (unit_mode);
6711 emit_label (ne_label);
6712 rtx_insn *insns = get_insns ();
6713 end_sequence ();
6714 emit_insn (insns);
6716 return result;
6719 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
6720 to TARGET if that's convenient.
6721 If the call is not inlined, return NULL_RTX. */
6723 static rtx
6724 inline_expand_builtin_bytecmp (tree exp, rtx target)
6726 tree fndecl = get_callee_fndecl (exp);
6727 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6728 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6730 /* Do NOT apply this inlining expansion when optimizing for size or
6731 optimization level below 2. */
6732 if (optimize < 2 || optimize_insn_for_size_p ())
6733 return NULL_RTX;
6735 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6736 || fcode == BUILT_IN_STRNCMP
6737 || fcode == BUILT_IN_MEMCMP);
6739 /* On a target where the type of the call (int) has the same or narrower precision
6740 than unsigned char, give up the inlining expansion. */
6741 if (TYPE_PRECISION (unsigned_char_type_node)
6742 >= TYPE_PRECISION (TREE_TYPE (exp)))
6743 return NULL_RTX;
6745 tree arg1 = CALL_EXPR_ARG (exp, 0);
6746 tree arg2 = CALL_EXPR_ARG (exp, 1);
6747 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6749 unsigned HOST_WIDE_INT len1 = 0;
6750 unsigned HOST_WIDE_INT len2 = 0;
6751 unsigned HOST_WIDE_INT len3 = 0;
6753 /* Get the object representation of the initializers of ARG1 and ARG2
6754 as strings, provided they refer to constant objects, with their byte
6755 sizes in LEN1 and LEN2, respectively. */
6756 const char *bytes1 = getbyterep (arg1, &len1);
6757 const char *bytes2 = getbyterep (arg2, &len2);
6759 /* Fail if neither argument refers to an initialized constant. */
6760 if (!bytes1 && !bytes2)
6761 return NULL_RTX;
6763 if (is_ncmp)
6765 /* Fail if the memcmp/strncmp bound is not a constant. */
6766 if (!tree_fits_uhwi_p (len3_tree))
6767 return NULL_RTX;
6769 len3 = tree_to_uhwi (len3_tree);
6771 if (fcode == BUILT_IN_MEMCMP)
6773 /* Fail if the memcmp bound is greater than the size of either
6774 of the two constant objects. */
6775 if ((bytes1 && len1 < len3)
6776 || (bytes2 && len2 < len3))
6777 return NULL_RTX;
6781 if (fcode != BUILT_IN_MEMCMP)
6783 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6784 and LEN2 to the length of the nul-terminated string stored
6785 in each. */
6786 if (bytes1 != NULL)
6787 len1 = strnlen (bytes1, len1) + 1;
6788 if (bytes2 != NULL)
6789 len2 = strnlen (bytes2, len2) + 1;
6792 /* See inline_string_cmp. */
6793 int const_str_n;
6794 if (!len1)
6795 const_str_n = 2;
6796 else if (!len2)
6797 const_str_n = 1;
6798 else if (len2 > len1)
6799 const_str_n = 1;
6800 else
6801 const_str_n = 2;
6803 /* For strncmp only, compute the new bound as the smallest of
6804 the lengths of the two strings (plus 1) and the bound provided
6805 to the function. */
6806 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6807 if (is_ncmp && len3 < bound)
6808 bound = len3;
6810 /* If the bound of the comparison is larger than the threshold,
6811 do nothing. */
6812 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6813 return NULL_RTX;
6815 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6817 /* Now, start inline expansion of the call. */
6818 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6819 (const_str_n == 1) ? bytes1 : bytes2, bound,
6820 const_str_n, mode);
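/* Illustrative note (not from the original source): at -O2 a call such as

     strcmp (s, "hi")

   qualifies here because one argument is a constant string; its bound
   (strlen ("hi") + 1 == 3) is compared against
   param_builtin_string_cmp_inline_length, and if small enough the call is
   replaced by the per-byte subtract-and-branch sequence built by
   inline_string_cmp above.  */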
6823 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6824 represents the size of the first argument to that call, or VOIDmode
6825 if the argument is a pointer. IGNORE will be true if the result
6826 isn't used. */
6827 static rtx
6828 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6829 bool ignore)
6831 rtx val, failsafe;
6832 unsigned nargs = call_expr_nargs (exp);
6834 tree arg0 = CALL_EXPR_ARG (exp, 0);
6836 if (mode == VOIDmode)
6838 mode = TYPE_MODE (TREE_TYPE (arg0));
6839 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6842 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6844 /* An optional second argument can be used as a failsafe value on
6845 some machines. If it isn't present, then the failsafe value is
6846 assumed to be 0. */
6847 if (nargs > 1)
6849 tree arg1 = CALL_EXPR_ARG (exp, 1);
6850 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6852 else
6853 failsafe = const0_rtx;
6855 /* If the result isn't used, the behavior is undefined. It would be
6856 nice to emit a warning here, but path splitting means this might
6857 happen with legitimate code. So simply drop the builtin
6858 expansion in that case; we've handled any side-effects above. */
6859 if (ignore)
6860 return const0_rtx;
6862 /* If we don't have a suitable target, create one to hold the result. */
6863 if (target == NULL || GET_MODE (target) != mode)
6864 target = gen_reg_rtx (mode);
6866 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6867 val = convert_modes (mode, VOIDmode, val, false);
6869 return targetm.speculation_safe_value (mode, target, val, failsafe);
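/* Illustrative sketch (not part of the original source): typical use as a
   Spectre-v1 style mitigation, where a value must not be forwarded along a
   mis-speculated path:

     if (i < bound)
       x = __builtin_speculation_safe_value (array[i]);

   The optional second argument supplies the failsafe used instead of 0; on
   targets without hardware support the target hook may simply return the
   value unchanged.  */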
6872 /* Expand an expression EXP that calls a built-in function,
6873 with result going to TARGET if that's convenient
6874 (and in mode MODE if that's convenient).
6875 SUBTARGET may be used as the target for computing one of EXP's operands.
6876 IGNORE is nonzero if the value is to be ignored. */
6879 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6880 int ignore)
6882 tree fndecl = get_callee_fndecl (exp);
6883 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6884 int flags;
6886 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6887 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6889 /* When ASan is enabled, we don't want to expand some memory/string
6890 builtins and rely on libsanitizer's hooks. This allows us to avoid
6891 redundant checks and be sure that a possible overflow will be detected
6892 by ASan. */
6894 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6895 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6896 return expand_call (exp, target, ignore);
6898 /* When not optimizing, generate calls to library functions for a certain
6899 set of builtins. */
6900 if (!optimize
6901 && !called_as_built_in (fndecl)
6902 && fcode != BUILT_IN_FORK
6903 && fcode != BUILT_IN_EXECL
6904 && fcode != BUILT_IN_EXECV
6905 && fcode != BUILT_IN_EXECLP
6906 && fcode != BUILT_IN_EXECLE
6907 && fcode != BUILT_IN_EXECVP
6908 && fcode != BUILT_IN_EXECVE
6909 && fcode != BUILT_IN_CLEAR_CACHE
6910 && !ALLOCA_FUNCTION_CODE_P (fcode)
6911 && fcode != BUILT_IN_FREE)
6912 return expand_call (exp, target, ignore);
6914 /* The built-in function expanders test for target == const0_rtx
6915 to determine whether the function's result will be ignored. */
6916 if (ignore)
6917 target = const0_rtx;
6919 /* If the result of a pure or const built-in function is ignored, and
6920 none of its arguments are volatile, we can avoid expanding the
6921 built-in call and just evaluate the arguments for side-effects. */
6922 if (target == const0_rtx
6923 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6924 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6926 bool volatilep = false;
6927 tree arg;
6928 call_expr_arg_iterator iter;
6930 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6931 if (TREE_THIS_VOLATILE (arg))
6933 volatilep = true;
6934 break;
6937 if (! volatilep)
6939 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6940 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6941 return const0_rtx;
6945 switch (fcode)
6947 CASE_FLT_FN (BUILT_IN_FABS):
6948 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6949 case BUILT_IN_FABSD32:
6950 case BUILT_IN_FABSD64:
6951 case BUILT_IN_FABSD128:
6952 target = expand_builtin_fabs (exp, target, subtarget);
6953 if (target)
6954 return target;
6955 break;
6957 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6958 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6959 target = expand_builtin_copysign (exp, target, subtarget);
6960 if (target)
6961 return target;
6962 break;
6964 /* Just do a normal library call if we were unable to fold
6965 the values. */
6966 CASE_FLT_FN (BUILT_IN_CABS):
6967 break;
6969 CASE_FLT_FN (BUILT_IN_FMA):
6970 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6971 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6972 if (target)
6973 return target;
6974 break;
6976 CASE_FLT_FN (BUILT_IN_ILOGB):
6977 if (! flag_unsafe_math_optimizations)
6978 break;
6979 gcc_fallthrough ();
6980 CASE_FLT_FN (BUILT_IN_ISINF):
6981 CASE_FLT_FN (BUILT_IN_FINITE):
6982 case BUILT_IN_ISFINITE:
6983 case BUILT_IN_ISNORMAL:
6984 target = expand_builtin_interclass_mathfn (exp, target);
6985 if (target)
6986 return target;
6987 break;
6989 CASE_FLT_FN (BUILT_IN_ICEIL):
6990 CASE_FLT_FN (BUILT_IN_LCEIL):
6991 CASE_FLT_FN (BUILT_IN_LLCEIL):
6992 CASE_FLT_FN (BUILT_IN_LFLOOR):
6993 CASE_FLT_FN (BUILT_IN_IFLOOR):
6994 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6995 target = expand_builtin_int_roundingfn (exp, target);
6996 if (target)
6997 return target;
6998 break;
7000 CASE_FLT_FN (BUILT_IN_IRINT):
7001 CASE_FLT_FN (BUILT_IN_LRINT):
7002 CASE_FLT_FN (BUILT_IN_LLRINT):
7003 CASE_FLT_FN (BUILT_IN_IROUND):
7004 CASE_FLT_FN (BUILT_IN_LROUND):
7005 CASE_FLT_FN (BUILT_IN_LLROUND):
7006 target = expand_builtin_int_roundingfn_2 (exp, target);
7007 if (target)
7008 return target;
7009 break;
7011 CASE_FLT_FN (BUILT_IN_POWI):
7012 target = expand_builtin_powi (exp, target);
7013 if (target)
7014 return target;
7015 break;
7017 CASE_FLT_FN (BUILT_IN_CEXPI):
7018 target = expand_builtin_cexpi (exp, target);
7019 gcc_assert (target);
7020 return target;
7022 CASE_FLT_FN (BUILT_IN_SIN):
7023 CASE_FLT_FN (BUILT_IN_COS):
7024 if (! flag_unsafe_math_optimizations)
7025 break;
7026 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7027 if (target)
7028 return target;
7029 break;
7031 CASE_FLT_FN (BUILT_IN_SINCOS):
7032 if (! flag_unsafe_math_optimizations)
7033 break;
7034 target = expand_builtin_sincos (exp);
7035 if (target)
7036 return target;
7037 break;
7039 case BUILT_IN_APPLY_ARGS:
7040 return expand_builtin_apply_args ();
7042 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7043 FUNCTION with a copy of the parameters described by
7044 ARGUMENTS, and ARGSIZE. It returns a block of memory
7045 allocated on the stack into which is stored all the registers
7046 that might possibly be used for returning the result of a
7047 function. ARGUMENTS is the value returned by
7048 __builtin_apply_args. ARGSIZE is the number of bytes of
7049 arguments that must be copied. ??? How should this value be
7050 computed? We'll also need a safe worst case value for varargs
7051 functions. */
7052 case BUILT_IN_APPLY:
7053 if (!validate_arglist (exp, POINTER_TYPE,
7054 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7055 && !validate_arglist (exp, REFERENCE_TYPE,
7056 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7057 return const0_rtx;
7058 else
7060 rtx ops[3];
7062 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7063 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7064 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7066 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7069 /* __builtin_return (RESULT) causes the function to return the
7070 value described by RESULT. RESULT is address of the block of
7071 memory returned by __builtin_apply. */
7072 case BUILT_IN_RETURN:
7073 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7074 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7075 return const0_rtx;
7077 case BUILT_IN_SAVEREGS:
7078 return expand_builtin_saveregs ();
7080 case BUILT_IN_VA_ARG_PACK:
7081 /* All valid uses of __builtin_va_arg_pack () are removed during
7082 inlining. */
7083 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7084 return const0_rtx;
7086 case BUILT_IN_VA_ARG_PACK_LEN:
7087 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7088 inlining. */
7089 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7090 return const0_rtx;
7092 /* Return the address of the first anonymous stack arg. */
7093 case BUILT_IN_NEXT_ARG:
7094 if (fold_builtin_next_arg (exp, false))
7095 return const0_rtx;
7096 return expand_builtin_next_arg ();
7098 case BUILT_IN_CLEAR_CACHE:
7099 expand_builtin___clear_cache (exp);
7100 return const0_rtx;
7102 case BUILT_IN_CLASSIFY_TYPE:
7103 return expand_builtin_classify_type (exp);
7105 case BUILT_IN_CONSTANT_P:
7106 return const0_rtx;
7108 case BUILT_IN_FRAME_ADDRESS:
7109 case BUILT_IN_RETURN_ADDRESS:
7110 return expand_builtin_frame_address (fndecl, exp);
7112 /* Returns the address of the area where the structure is returned.
7113 0 otherwise. */
7114 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7115 if (call_expr_nargs (exp) != 0
7116 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7117 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7118 return const0_rtx;
7119 else
7120 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7122 CASE_BUILT_IN_ALLOCA:
7123 target = expand_builtin_alloca (exp);
7124 if (target)
7125 return target;
7126 break;
7128 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7129 return expand_asan_emit_allocas_unpoison (exp);
7131 case BUILT_IN_STACK_SAVE:
7132 return expand_stack_save ();
7134 case BUILT_IN_STACK_RESTORE:
7135 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7136 return const0_rtx;
7138 case BUILT_IN_BSWAP16:
7139 case BUILT_IN_BSWAP32:
7140 case BUILT_IN_BSWAP64:
7141 case BUILT_IN_BSWAP128:
7142 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7143 if (target)
7144 return target;
7145 break;
7147 CASE_INT_FN (BUILT_IN_FFS):
7148 target = expand_builtin_unop (target_mode, exp, target,
7149 subtarget, ffs_optab);
7150 if (target)
7151 return target;
7152 break;
7154 CASE_INT_FN (BUILT_IN_CLZ):
7155 target = expand_builtin_unop (target_mode, exp, target,
7156 subtarget, clz_optab);
7157 if (target)
7158 return target;
7159 break;
7161 CASE_INT_FN (BUILT_IN_CTZ):
7162 target = expand_builtin_unop (target_mode, exp, target,
7163 subtarget, ctz_optab);
7164 if (target)
7165 return target;
7166 break;
7168 CASE_INT_FN (BUILT_IN_CLRSB):
7169 target = expand_builtin_unop (target_mode, exp, target,
7170 subtarget, clrsb_optab);
7171 if (target)
7172 return target;
7173 break;
7175 CASE_INT_FN (BUILT_IN_POPCOUNT):
7176 target = expand_builtin_unop (target_mode, exp, target,
7177 subtarget, popcount_optab);
7178 if (target)
7179 return target;
7180 break;
7182 CASE_INT_FN (BUILT_IN_PARITY):
7183 target = expand_builtin_unop (target_mode, exp, target,
7184 subtarget, parity_optab);
7185 if (target)
7186 return target;
7187 break;
7189 case BUILT_IN_STRLEN:
7190 target = expand_builtin_strlen (exp, target, target_mode);
7191 if (target)
7192 return target;
7193 break;
7195 case BUILT_IN_STRNLEN:
7196 target = expand_builtin_strnlen (exp, target, target_mode);
7197 if (target)
7198 return target;
7199 break;
7201 case BUILT_IN_STRCPY:
7202 target = expand_builtin_strcpy (exp, target);
7203 if (target)
7204 return target;
7205 break;
7207 case BUILT_IN_STRNCPY:
7208 target = expand_builtin_strncpy (exp, target);
7209 if (target)
7210 return target;
7211 break;
7213 case BUILT_IN_STPCPY:
7214 target = expand_builtin_stpcpy (exp, target, mode);
7215 if (target)
7216 return target;
7217 break;
7219 case BUILT_IN_MEMCPY:
7220 target = expand_builtin_memcpy (exp, target);
7221 if (target)
7222 return target;
7223 break;
7225 case BUILT_IN_MEMMOVE:
7226 target = expand_builtin_memmove (exp, target);
7227 if (target)
7228 return target;
7229 break;
7231 case BUILT_IN_MEMPCPY:
7232 target = expand_builtin_mempcpy (exp, target);
7233 if (target)
7234 return target;
7235 break;
7237 case BUILT_IN_MEMSET:
7238 target = expand_builtin_memset (exp, target, mode);
7239 if (target)
7240 return target;
7241 break;
7243 case BUILT_IN_BZERO:
7244 target = expand_builtin_bzero (exp);
7245 if (target)
7246 return target;
7247 break;
7249 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7250 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7251 when changing it to a strcmp call. */
7252 case BUILT_IN_STRCMP_EQ:
7253 target = expand_builtin_memcmp (exp, target, true);
7254 if (target)
7255 return target;
7257 /* Change this call back to a BUILT_IN_STRCMP. */
7258 TREE_OPERAND (exp, 1)
7259 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7261 /* Delete the last parameter. */
7262 unsigned int i;
7263 vec<tree, va_gc> *arg_vec;
7264 vec_alloc (arg_vec, 2);
7265 for (i = 0; i < 2; i++)
7266 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7267 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7268 /* FALLTHROUGH */
7270 case BUILT_IN_STRCMP:
7271 target = expand_builtin_strcmp (exp, target);
7272 if (target)
7273 return target;
7274 break;
7276 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7277 back to a BUILT_IN_STRNCMP. */
7278 case BUILT_IN_STRNCMP_EQ:
7279 target = expand_builtin_memcmp (exp, target, true);
7280 if (target)
7281 return target;
7283 /* Change it back to a BUILT_IN_STRNCMP. */
7284 TREE_OPERAND (exp, 1)
7285 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7286 /* FALLTHROUGH */
7288 case BUILT_IN_STRNCMP:
7289 target = expand_builtin_strncmp (exp, target, mode);
7290 if (target)
7291 return target;
7292 break;
7294 case BUILT_IN_BCMP:
7295 case BUILT_IN_MEMCMP:
7296 case BUILT_IN_MEMCMP_EQ:
7297 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7298 if (target)
7299 return target;
7300 if (fcode == BUILT_IN_MEMCMP_EQ)
7302 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7303 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7305 break;
7307 case BUILT_IN_SETJMP:
7308 /* This should have been lowered to the builtins below. */
7309 gcc_unreachable ();
7311 case BUILT_IN_SETJMP_SETUP:
7312 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7313 and the receiver label. */
7314 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7316 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7317 VOIDmode, EXPAND_NORMAL);
7318 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7319 rtx_insn *label_r = label_rtx (label);
7321 /* This is copied from the handling of non-local gotos. */
7322 expand_builtin_setjmp_setup (buf_addr, label_r);
7323 nonlocal_goto_handler_labels
7324 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7325 nonlocal_goto_handler_labels);
7326 /* ??? Do not let expand_label treat us as such since we would
7327 not want to be both on the list of non-local labels and on
7328 the list of forced labels. */
7329 FORCED_LABEL (label) = 0;
7330 return const0_rtx;
7332 break;
7334 case BUILT_IN_SETJMP_RECEIVER:
7335 /* __builtin_setjmp_receiver is passed the receiver label. */
7336 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7338 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7339 rtx_insn *label_r = label_rtx (label);
7341 expand_builtin_setjmp_receiver (label_r);
7342 return const0_rtx;
7344 break;
7346 /* __builtin_longjmp is passed a pointer to an array of five words.
7347 It's similar to the C library longjmp function but works with
7348 __builtin_setjmp above. */
7349 case BUILT_IN_LONGJMP:
7350 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7352 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7353 VOIDmode, EXPAND_NORMAL);
7354 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7356 if (value != const1_rtx)
7358 error ("%<__builtin_longjmp%> second argument must be 1");
7359 return const0_rtx;
7362 expand_builtin_longjmp (buf_addr, value);
7363 return const0_rtx;
7365 break;
7367 case BUILT_IN_NONLOCAL_GOTO:
7368 target = expand_builtin_nonlocal_goto (exp);
7369 if (target)
7370 return target;
7371 break;
7373 /* This updates the setjmp buffer that is its argument with the value
7374 of the current stack pointer. */
7375 case BUILT_IN_UPDATE_SETJMP_BUF:
7376 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7378 rtx buf_addr
7379 = expand_normal (CALL_EXPR_ARG (exp, 0));
7381 expand_builtin_update_setjmp_buf (buf_addr);
7382 return const0_rtx;
7384 break;
7386 case BUILT_IN_TRAP:
7387 expand_builtin_trap ();
7388 return const0_rtx;
7390 case BUILT_IN_UNREACHABLE:
7391 expand_builtin_unreachable ();
7392 return const0_rtx;
7394 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7395 case BUILT_IN_SIGNBITD32:
7396 case BUILT_IN_SIGNBITD64:
7397 case BUILT_IN_SIGNBITD128:
7398 target = expand_builtin_signbit (exp, target);
7399 if (target)
7400 return target;
7401 break;
7403 /* Various hooks for the DWARF 2 __throw routine. */
7404 case BUILT_IN_UNWIND_INIT:
7405 expand_builtin_unwind_init ();
7406 return const0_rtx;
7407 case BUILT_IN_DWARF_CFA:
7408 return virtual_cfa_rtx;
7409 #ifdef DWARF2_UNWIND_INFO
7410 case BUILT_IN_DWARF_SP_COLUMN:
7411 return expand_builtin_dwarf_sp_column ();
7412 case BUILT_IN_INIT_DWARF_REG_SIZES:
7413 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7414 return const0_rtx;
7415 #endif
7416 case BUILT_IN_FROB_RETURN_ADDR:
7417 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7418 case BUILT_IN_EXTRACT_RETURN_ADDR:
7419 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7420 case BUILT_IN_EH_RETURN:
7421 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7422 CALL_EXPR_ARG (exp, 1));
7423 return const0_rtx;
7424 case BUILT_IN_EH_RETURN_DATA_REGNO:
7425 return expand_builtin_eh_return_data_regno (exp);
7426 case BUILT_IN_EXTEND_POINTER:
7427 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7428 case BUILT_IN_EH_POINTER:
7429 return expand_builtin_eh_pointer (exp);
7430 case BUILT_IN_EH_FILTER:
7431 return expand_builtin_eh_filter (exp);
7432 case BUILT_IN_EH_COPY_VALUES:
7433 return expand_builtin_eh_copy_values (exp);
7435 case BUILT_IN_VA_START:
7436 return expand_builtin_va_start (exp);
7437 case BUILT_IN_VA_END:
7438 return expand_builtin_va_end (exp);
7439 case BUILT_IN_VA_COPY:
7440 return expand_builtin_va_copy (exp);
7441 case BUILT_IN_EXPECT:
7442 return expand_builtin_expect (exp, target);
7443 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7444 return expand_builtin_expect_with_probability (exp, target);
7445 case BUILT_IN_ASSUME_ALIGNED:
7446 return expand_builtin_assume_aligned (exp, target);
7447 case BUILT_IN_PREFETCH:
7448 expand_builtin_prefetch (exp);
7449 return const0_rtx;
7451 case BUILT_IN_INIT_TRAMPOLINE:
7452 return expand_builtin_init_trampoline (exp, true);
7453 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7454 return expand_builtin_init_trampoline (exp, false);
7455 case BUILT_IN_ADJUST_TRAMPOLINE:
7456 return expand_builtin_adjust_trampoline (exp);
7458 case BUILT_IN_INIT_DESCRIPTOR:
7459 return expand_builtin_init_descriptor (exp);
7460 case BUILT_IN_ADJUST_DESCRIPTOR:
7461 return expand_builtin_adjust_descriptor (exp);
7463 case BUILT_IN_FORK:
7464 case BUILT_IN_EXECL:
7465 case BUILT_IN_EXECV:
7466 case BUILT_IN_EXECLP:
7467 case BUILT_IN_EXECLE:
7468 case BUILT_IN_EXECVP:
7469 case BUILT_IN_EXECVE:
7470 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7471 if (target)
7472 return target;
7473 break;
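/* For the __sync_* and __atomic_* groups below, the _1.._16 enumerators are
   laid out consecutively, so FCODE minus the _1 enumerator gives 0..4 for the
   1/2/4/8/16-byte variants, and get_builtin_sync_mode maps that offset to the
   integer mode of the matching width.  */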
7475 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7476 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7477 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7478 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7479 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7480 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7481 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7482 if (target)
7483 return target;
7484 break;
7486 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7487 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7488 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7489 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7490 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7492 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7493 if (target)
7494 return target;
7495 break;
7497 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7498 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7499 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7500 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7501 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7503 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7504 if (target)
7505 return target;
7506 break;
7508 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7509 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7510 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7511 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7512 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7513 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7514 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7515 if (target)
7516 return target;
7517 break;
7519 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7520 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7521 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7522 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7523 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7525 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7526 if (target)
7527 return target;
7528 break;
7530 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7531 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7532 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7533 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7534 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7535 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7536 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7537 if (target)
7538 return target;
7539 break;
7541 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7542 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7543 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7544 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7545 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7546 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7547 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7548 if (target)
7549 return target;
7550 break;
7552 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7553 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7554 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7555 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7556 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7557 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7558 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7559 if (target)
7560 return target;
7561 break;
7563 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7564 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7565 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7566 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7567 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7568 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7569 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7570 if (target)
7571 return target;
7572 break;
7574 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7575 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7576 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7577 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7578 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7579 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7580 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7581 if (target)
7582 return target;
7583 break;
7585 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7586 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7587 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7588 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7589 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7590 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7591 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7592 if (target)
7593 return target;
7594 break;
7596 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7597 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7598 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7599 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7600 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7601 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7602 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7603 if (target)
7604 return target;
7605 break;
7607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7608 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7609 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7610 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7611 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7612 if (mode == VOIDmode)
7613 mode = TYPE_MODE (boolean_type_node);
7614 if (!target || !register_operand (target, mode))
7615 target = gen_reg_rtx (mode);
7617 mode = get_builtin_sync_mode
7618 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7619 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7620 if (target)
7621 return target;
7622 break;
7624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7625 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7626 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7627 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7628 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7629 mode = get_builtin_sync_mode
7630 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7631 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7632 if (target)
7633 return target;
7634 break;
7636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7637 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7638 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7639 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7640 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7641 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7642 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7643 if (target)
7644 return target;
7645 break;
7647 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7648 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7649 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7650 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7651 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7652 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7653 expand_builtin_sync_lock_release (mode, exp);
7654 return const0_rtx;
7656 case BUILT_IN_SYNC_SYNCHRONIZE:
7657 expand_builtin_sync_synchronize ();
7658 return const0_rtx;
7660 case BUILT_IN_ATOMIC_EXCHANGE_1:
7661 case BUILT_IN_ATOMIC_EXCHANGE_2:
7662 case BUILT_IN_ATOMIC_EXCHANGE_4:
7663 case BUILT_IN_ATOMIC_EXCHANGE_8:
7664 case BUILT_IN_ATOMIC_EXCHANGE_16:
7665 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7666 target = expand_builtin_atomic_exchange (mode, exp, target);
7667 if (target)
7668 return target;
7669 break;
7671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7672 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7673 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7674 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7675 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7677 unsigned int nargs, z;
7678 vec<tree, va_gc> *vec;
7680 mode =
7681 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7682 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7683 if (target)
7684 return target;
7686 /* If this is turned into an external library call, the weak parameter
7687 must be dropped to match the expected parameter list. */
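/* Illustratively (assuming the usual libatomic entry points): the builtin
   form is

     bool __atomic_compare_exchange_N (T *ptr, T *expected, T desired,
                                       bool weak, int success, int failure);

   while the out-of-line routine takes no WEAK argument, so arguments 0-2 and
   4-5 are kept below and argument 3 is dropped.  */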
7688 nargs = call_expr_nargs (exp);
7689 vec_alloc (vec, nargs - 1);
7690 for (z = 0; z < 3; z++)
7691 vec->quick_push (CALL_EXPR_ARG (exp, z));
7692 /* Skip the boolean weak parameter. */
7693 for (z = 4; z < 6; z++)
7694 vec->quick_push (CALL_EXPR_ARG (exp, z));
7695 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7696 break;
7699 case BUILT_IN_ATOMIC_LOAD_1:
7700 case BUILT_IN_ATOMIC_LOAD_2:
7701 case BUILT_IN_ATOMIC_LOAD_4:
7702 case BUILT_IN_ATOMIC_LOAD_8:
7703 case BUILT_IN_ATOMIC_LOAD_16:
7704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7705 target = expand_builtin_atomic_load (mode, exp, target);
7706 if (target)
7707 return target;
7708 break;
7710 case BUILT_IN_ATOMIC_STORE_1:
7711 case BUILT_IN_ATOMIC_STORE_2:
7712 case BUILT_IN_ATOMIC_STORE_4:
7713 case BUILT_IN_ATOMIC_STORE_8:
7714 case BUILT_IN_ATOMIC_STORE_16:
7715 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7716 target = expand_builtin_atomic_store (mode, exp);
7717 if (target)
7718 return const0_rtx;
7719 break;
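/* For the __atomic_<op>_fetch_N cases below, the matching
   __atomic_fetch_<op>_N builtin is passed as the library fallback: roughly,
   if no inline pattern exists, the fetch-then-op routine is called and the
   operation is reapplied to its result to recover the "after" value (a sketch
   of the intent; expand_builtin_atomic_fetch_op has the details).  */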
7721 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7722 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7723 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7724 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7725 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7727 enum built_in_function lib;
7728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7729 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7730 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7731 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7732 ignore, lib);
7733 if (target)
7734 return target;
7735 break;
7737 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7738 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7739 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7740 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7741 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7743 enum built_in_function lib;
7744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7745 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7746 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7747 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7748 ignore, lib);
7749 if (target)
7750 return target;
7751 break;
7753 case BUILT_IN_ATOMIC_AND_FETCH_1:
7754 case BUILT_IN_ATOMIC_AND_FETCH_2:
7755 case BUILT_IN_ATOMIC_AND_FETCH_4:
7756 case BUILT_IN_ATOMIC_AND_FETCH_8:
7757 case BUILT_IN_ATOMIC_AND_FETCH_16:
7759 enum built_in_function lib;
7760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7761 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7762 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7763 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7764 ignore, lib);
7765 if (target)
7766 return target;
7767 break;
7769 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7770 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7771 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7772 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7773 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7775 enum built_in_function lib;
7776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7777 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7778 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7779 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7780 ignore, lib);
7781 if (target)
7782 return target;
7783 break;
7785 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7786 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7787 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7788 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7789 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7791 enum built_in_function lib;
7792 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7793 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7794 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7795 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7796 ignore, lib);
7797 if (target)
7798 return target;
7799 break;
7801 case BUILT_IN_ATOMIC_OR_FETCH_1:
7802 case BUILT_IN_ATOMIC_OR_FETCH_2:
7803 case BUILT_IN_ATOMIC_OR_FETCH_4:
7804 case BUILT_IN_ATOMIC_OR_FETCH_8:
7805 case BUILT_IN_ATOMIC_OR_FETCH_16:
7807 enum built_in_function lib;
7808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7809 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7810 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7811 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7812 ignore, lib);
7813 if (target)
7814 return target;
7815 break;
7817 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7818 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7819 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7820 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7821 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7822 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7823 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7824 ignore, BUILT_IN_NONE);
7825 if (target)
7826 return target;
7827 break;
7829 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7830 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7831 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7832 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7833 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7835 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7836 ignore, BUILT_IN_NONE);
7837 if (target)
7838 return target;
7839 break;
7841 case BUILT_IN_ATOMIC_FETCH_AND_1:
7842 case BUILT_IN_ATOMIC_FETCH_AND_2:
7843 case BUILT_IN_ATOMIC_FETCH_AND_4:
7844 case BUILT_IN_ATOMIC_FETCH_AND_8:
7845 case BUILT_IN_ATOMIC_FETCH_AND_16:
7846 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7847 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7848 ignore, BUILT_IN_NONE);
7849 if (target)
7850 return target;
7851 break;
7853 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7854 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7855 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7856 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7857 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7858 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7859 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7860 ignore, BUILT_IN_NONE);
7861 if (target)
7862 return target;
7863 break;
7865 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7866 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7867 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7868 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7869 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7870 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7871 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7872 ignore, BUILT_IN_NONE);
7873 if (target)
7874 return target;
7875 break;
7877 case BUILT_IN_ATOMIC_FETCH_OR_1:
7878 case BUILT_IN_ATOMIC_FETCH_OR_2:
7879 case BUILT_IN_ATOMIC_FETCH_OR_4:
7880 case BUILT_IN_ATOMIC_FETCH_OR_8:
7881 case BUILT_IN_ATOMIC_FETCH_OR_16:
7882 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7883 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7884 ignore, BUILT_IN_NONE);
7885 if (target)
7886 return target;
7887 break;
7889 case BUILT_IN_ATOMIC_TEST_AND_SET:
7890 return expand_builtin_atomic_test_and_set (exp, target);
7892 case BUILT_IN_ATOMIC_CLEAR:
7893 return expand_builtin_atomic_clear (exp);
7895 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7896 return expand_builtin_atomic_always_lock_free (exp);
7898 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7899 target = expand_builtin_atomic_is_lock_free (exp);
7900 if (target)
7901 return target;
7902 break;
7904 case BUILT_IN_ATOMIC_THREAD_FENCE:
7905 expand_builtin_atomic_thread_fence (exp);
7906 return const0_rtx;
7908 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7909 expand_builtin_atomic_signal_fence (exp);
7910 return const0_rtx;
7912 case BUILT_IN_OBJECT_SIZE:
7913 return expand_builtin_object_size (exp);
7915 case BUILT_IN_MEMCPY_CHK:
7916 case BUILT_IN_MEMPCPY_CHK:
7917 case BUILT_IN_MEMMOVE_CHK:
7918 case BUILT_IN_MEMSET_CHK:
7919 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7920 if (target)
7921 return target;
7922 break;
7924 case BUILT_IN_STRCPY_CHK:
7925 case BUILT_IN_STPCPY_CHK:
7926 case BUILT_IN_STRNCPY_CHK:
7927 case BUILT_IN_STPNCPY_CHK:
7928 case BUILT_IN_STRCAT_CHK:
7929 case BUILT_IN_STRNCAT_CHK:
7930 case BUILT_IN_SNPRINTF_CHK:
7931 case BUILT_IN_VSNPRINTF_CHK:
7932 maybe_emit_chk_warning (exp, fcode);
7933 break;
7935 case BUILT_IN_SPRINTF_CHK:
7936 case BUILT_IN_VSPRINTF_CHK:
7937 maybe_emit_sprintf_chk_warning (exp, fcode);
7938 break;
7940 case BUILT_IN_THREAD_POINTER:
7941 return expand_builtin_thread_pointer (exp, target);
7943 case BUILT_IN_SET_THREAD_POINTER:
7944 expand_builtin_set_thread_pointer (exp);
7945 return const0_rtx;
7947 case BUILT_IN_ACC_ON_DEVICE:
7948 /* Do a library call if we failed to expand the builtin when
7949 folding. */
7950 break;
7952 case BUILT_IN_GOACC_PARLEVEL_ID:
7953 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7954 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7956 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
7957 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
7959 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
7960 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
7961 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
7962 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
7963 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
7964 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
7965 return expand_speculation_safe_value (mode, exp, target, ignore);
7967 default: /* just do library call, if unknown builtin */
7968 break;
7971 /* The switch statement above can drop through to cause the function
7972 to be called normally. */
7973 return expand_call (exp, target, ignore);
7976 /* Determine whether a tree node represents a call to a built-in
7977 function. If the tree T is a call to a built-in function with
7978 the right number of arguments of the appropriate types, return
7979 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7980 Otherwise the return value is END_BUILTINS. */
7982 enum built_in_function
7983 builtin_mathfn_code (const_tree t)
7985 const_tree fndecl, arg, parmlist;
7986 const_tree argtype, parmtype;
7987 const_call_expr_arg_iterator iter;
7989 if (TREE_CODE (t) != CALL_EXPR)
7990 return END_BUILTINS;
7992 fndecl = get_callee_fndecl (t);
7993 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7994 return END_BUILTINS;
7996 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7997 init_const_call_expr_arg_iterator (t, &iter);
7998 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8000 /* If a function doesn't take a variable number of arguments,
8001 the last element in the list will have type `void'. */
8002 parmtype = TREE_VALUE (parmlist);
8003 if (VOID_TYPE_P (parmtype))
8005 if (more_const_call_expr_args_p (&iter))
8006 return END_BUILTINS;
8007 return DECL_FUNCTION_CODE (fndecl);
8010 if (! more_const_call_expr_args_p (&iter))
8011 return END_BUILTINS;
8013 arg = next_const_call_expr_arg (&iter);
8014 argtype = TREE_TYPE (arg);
8016 if (SCALAR_FLOAT_TYPE_P (parmtype))
8018 if (! SCALAR_FLOAT_TYPE_P (argtype))
8019 return END_BUILTINS;
8021 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8023 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8024 return END_BUILTINS;
8026 else if (POINTER_TYPE_P (parmtype))
8028 if (! POINTER_TYPE_P (argtype))
8029 return END_BUILTINS;
8031 else if (INTEGRAL_TYPE_P (parmtype))
8033 if (! INTEGRAL_TYPE_P (argtype))
8034 return END_BUILTINS;
8036 else
8037 return END_BUILTINS;
8040 /* Variable-length argument list. */
8041 return DECL_FUNCTION_CODE (fndecl);
8044 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8045 evaluate to a constant. */
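/* For example, __builtin_constant_p (42) folds to 1 here, an argument with
   side effects folds to 0, and anything not yet decidable is left alone
   (NULL_TREE) so that later passes may still resolve it.  */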
8047 static tree
8048 fold_builtin_constant_p (tree arg)
8050 /* We return 1 for a numeric type that's known to be a constant
8051 value at compile-time or for an aggregate type that's a
8052 literal constant. */
8053 STRIP_NOPS (arg);
8055 /* If we know this is a constant, return the constant one. */
8056 if (CONSTANT_CLASS_P (arg)
8057 || (TREE_CODE (arg) == CONSTRUCTOR
8058 && TREE_CONSTANT (arg)))
8059 return integer_one_node;
8060 if (TREE_CODE (arg) == ADDR_EXPR)
8062 tree op = TREE_OPERAND (arg, 0);
8063 if (TREE_CODE (op) == STRING_CST
8064 || (TREE_CODE (op) == ARRAY_REF
8065 && integer_zerop (TREE_OPERAND (op, 1))
8066 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8067 return integer_one_node;
8070 /* If this expression has side effects, show we don't know it to be a
8071 constant. Likewise if it's a pointer or aggregate type since in
8072 those cases we only want literals, as those are only optimized
8073 when generating RTL, not later.
8074 And finally, if we are compiling an initializer, not code, we
8075 need to return a definite result now; there's not going to be any
8076 more optimization done. */
8077 if (TREE_SIDE_EFFECTS (arg)
8078 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8079 || POINTER_TYPE_P (TREE_TYPE (arg))
8080 || cfun == 0
8081 || folding_initializer
8082 || force_folding_builtin_constant_p)
8083 return integer_zero_node;
8085 return NULL_TREE;
8088 /* Create builtin_expect or builtin_expect_with_probability
8089 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8090 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8091 builtin_expect_with_probability instead uses third argument as PROBABILITY
8092 value. */
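/* The result has the shape
     __builtin_expect ((long) PRED, (long) EXPECTED) != 0
   (or the _with_probability variant), so callers can use it directly as a
   truthvalue.  */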
8094 static tree
8095 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8096 tree predictor, tree probability)
8098 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8100 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8101 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8102 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8103 ret_type = TREE_TYPE (TREE_TYPE (fn));
8104 pred_type = TREE_VALUE (arg_types);
8105 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8107 pred = fold_convert_loc (loc, pred_type, pred);
8108 expected = fold_convert_loc (loc, expected_type, expected);
8110 if (probability)
8111 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8112 else
8113 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8114 predictor);
8116 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8117 build_int_cst (ret_type, 0));
8120 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8121 NULL_TREE if no simplification is possible. */
8123 tree
8124 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8125 tree arg3)
8127 tree inner, fndecl, inner_arg0;
8128 enum tree_code code;
8130 /* Distribute the expected value over short-circuiting operators.
8131 See through the cast from truthvalue_type_node to long. */
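/* For example, __builtin_expect (a && b, 1) is rewritten below as
   (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
   then converted back to the type of the original call.  */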
8132 inner_arg0 = arg0;
8133 while (CONVERT_EXPR_P (inner_arg0)
8134 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8135 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8136 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8138 /* If this is a builtin_expect within a builtin_expect keep the
8139 inner one. See through a comparison against a constant. It
8140 might have been added to create a truthvalue. */
8141 inner = inner_arg0;
8143 if (COMPARISON_CLASS_P (inner)
8144 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8145 inner = TREE_OPERAND (inner, 0);
8147 if (TREE_CODE (inner) == CALL_EXPR
8148 && (fndecl = get_callee_fndecl (inner))
8149 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8150 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8151 return arg0;
8153 inner = inner_arg0;
8154 code = TREE_CODE (inner);
8155 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8157 tree op0 = TREE_OPERAND (inner, 0);
8158 tree op1 = TREE_OPERAND (inner, 1);
8159 arg1 = save_expr (arg1);
8161 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8162 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8163 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8165 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8168 /* If the argument isn't invariant then there's nothing else we can do. */
8169 if (!TREE_CONSTANT (inner_arg0))
8170 return NULL_TREE;
8172 /* If we expect that a comparison against the argument will fold to
8173 a constant, return the constant. In practice, this means a true
8174 constant or the address of a non-weak symbol. */
8175 inner = inner_arg0;
8176 STRIP_NOPS (inner);
8177 if (TREE_CODE (inner) == ADDR_EXPR)
8181 inner = TREE_OPERAND (inner, 0);
8183 while (TREE_CODE (inner) == COMPONENT_REF
8184 || TREE_CODE (inner) == ARRAY_REF);
8185 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8186 return NULL_TREE;
8189 /* Otherwise, ARG0 already has the proper type for the return value. */
8190 return arg0;
8193 /* Fold a call to __builtin_classify_type with argument ARG. */
8195 static tree
8196 fold_builtin_classify_type (tree arg)
8198 if (arg == 0)
8199 return build_int_cst (integer_type_node, no_type_class);
8201 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8204 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8205 ARG. */
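/* For example, strlen ("abc") folds to the constant 3 (converted to TYPE)
   via c_strlen.  */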
8207 static tree
8208 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8210 if (!validate_arg (arg, POINTER_TYPE))
8211 return NULL_TREE;
8212 else
8214 c_strlen_data lendata = { };
8215 tree len = c_strlen (arg, 0, &lendata);
8217 if (len)
8218 return fold_convert_loc (loc, type, len);
8220 /* TODO: Move this to gimple-ssa-warn-access once the pass also runs
8221 early enough to detect invalid reads in multidimensional
8222 arrays and struct members. */
8223 if (!lendata.decl)
8224 c_strlen (arg, 1, &lendata);
8226 if (lendata.decl)
8228 if (EXPR_HAS_LOCATION (arg))
8229 loc = EXPR_LOCATION (arg);
8230 else if (loc == UNKNOWN_LOCATION)
8231 loc = input_location;
8232 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8235 return NULL_TREE;
8239 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8241 static tree
8242 fold_builtin_inf (location_t loc, tree type, int warn)
8244 REAL_VALUE_TYPE real;
8246 /* __builtin_inff is intended to be usable to define INFINITY on all
8247 targets. If an infinity is not available, INFINITY expands "to a
8248 positive constant of type float that overflows at translation
8249 time", footnote "In this case, using INFINITY will violate the
8250 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8251 Thus we pedwarn to ensure this constraint violation is
8252 diagnosed. */
8253 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8254 pedwarn (loc, 0, "target format does not support infinity");
8256 real_inf (&real);
8257 return build_real (type, real);
8260 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8261 NULL_TREE if no simplification can be made. */
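/* Roughly, sincos (x, &s, &c) is rewritten as

     tmp = cexpi (x); s = __imag tmp; c = __real tmp;

   (or folded outright when X is constant), assuming the target provides a
   usable cexpi.  */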
8263 static tree
8264 fold_builtin_sincos (location_t loc,
8265 tree arg0, tree arg1, tree arg2)
8267 tree type;
8268 tree fndecl, call = NULL_TREE;
8270 if (!validate_arg (arg0, REAL_TYPE)
8271 || !validate_arg (arg1, POINTER_TYPE)
8272 || !validate_arg (arg2, POINTER_TYPE))
8273 return NULL_TREE;
8275 type = TREE_TYPE (arg0);
8277 /* Calculate the result when the argument is a constant. */
8278 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8279 if (fn == END_BUILTINS)
8280 return NULL_TREE;
8282 /* Canonicalize sincos to cexpi. */
8283 if (TREE_CODE (arg0) == REAL_CST)
8285 tree complex_type = build_complex_type (type);
8286 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8288 if (!call)
8290 if (!targetm.libc_has_function (function_c99_math_complex, type)
8291 || !builtin_decl_implicit_p (fn))
8292 return NULL_TREE;
8293 fndecl = builtin_decl_explicit (fn);
8294 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8295 call = builtin_save_expr (call);
8298 tree ptype = build_pointer_type (type);
8299 arg1 = fold_convert (ptype, arg1);
8300 arg2 = fold_convert (ptype, arg2);
8301 return build2 (COMPOUND_EXPR, void_type_node,
8302 build2 (MODIFY_EXPR, void_type_node,
8303 build_fold_indirect_ref_loc (loc, arg1),
8304 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8305 build2 (MODIFY_EXPR, void_type_node,
8306 build_fold_indirect_ref_loc (loc, arg2),
8307 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8310 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8311 Return NULL_TREE if no simplification can be made. */
8313 static tree
8314 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8316 if (!validate_arg (arg1, POINTER_TYPE)
8317 || !validate_arg (arg2, POINTER_TYPE)
8318 || !validate_arg (len, INTEGER_TYPE))
8319 return NULL_TREE;
8321 /* If the LEN parameter is zero, return zero. */
8322 if (integer_zerop (len))
8323 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8324 arg1, arg2);
8326 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8327 if (operand_equal_p (arg1, arg2, 0))
8328 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8330 /* If len parameter is one, return an expression corresponding to
8331 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8332 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8334 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8335 tree cst_uchar_ptr_node
8336 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8338 tree ind1
8339 = fold_convert_loc (loc, integer_type_node,
8340 build1 (INDIRECT_REF, cst_uchar_node,
8341 fold_convert_loc (loc,
8342 cst_uchar_ptr_node,
8343 arg1)));
8344 tree ind2
8345 = fold_convert_loc (loc, integer_type_node,
8346 build1 (INDIRECT_REF, cst_uchar_node,
8347 fold_convert_loc (loc,
8348 cst_uchar_ptr_node,
8349 arg2)));
8350 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8353 return NULL_TREE;
8356 /* Fold a call to builtin isascii with argument ARG. */
8358 static tree
8359 fold_builtin_isascii (location_t loc, tree arg)
8361 if (!validate_arg (arg, INTEGER_TYPE))
8362 return NULL_TREE;
8363 else
8365 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8366 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8367 build_int_cst (integer_type_node,
8368 ~ (unsigned HOST_WIDE_INT) 0x7f));
8369 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8370 arg, integer_zero_node);
8374 /* Fold a call to builtin toascii with argument ARG. */
8376 static tree
8377 fold_builtin_toascii (location_t loc, tree arg)
8379 if (!validate_arg (arg, INTEGER_TYPE))
8380 return NULL_TREE;
8382 /* Transform toascii(c) -> (c & 0x7f). */
8383 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8384 build_int_cst (integer_type_node, 0x7f));
8387 /* Fold a call to builtin isdigit with argument ARG. */
8389 static tree
8390 fold_builtin_isdigit (location_t loc, tree arg)
8392 if (!validate_arg (arg, INTEGER_TYPE))
8393 return NULL_TREE;
8394 else
8396 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8397 /* According to the C standard, isdigit is unaffected by locale.
8398 However, it definitely is affected by the target character set. */
8399 unsigned HOST_WIDE_INT target_digit0
8400 = lang_hooks.to_target_charset ('0');
8402 if (target_digit0 == 0)
8403 return NULL_TREE;
8405 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8406 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8407 build_int_cst (unsigned_type_node, target_digit0));
8408 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8409 build_int_cst (unsigned_type_node, 9));
8413 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8415 static tree
8416 fold_builtin_fabs (location_t loc, tree arg, tree type)
8418 if (!validate_arg (arg, REAL_TYPE))
8419 return NULL_TREE;
8421 arg = fold_convert_loc (loc, type, arg);
8422 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8425 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8427 static tree
8428 fold_builtin_abs (location_t loc, tree arg, tree type)
8430 if (!validate_arg (arg, INTEGER_TYPE))
8431 return NULL_TREE;
8433 arg = fold_convert_loc (loc, type, arg);
8434 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8437 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8439 static tree
8440 fold_builtin_carg (location_t loc, tree arg, tree type)
8442 if (validate_arg (arg, COMPLEX_TYPE)
8443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8445 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8447 if (atan2_fn)
8449 tree new_arg = builtin_save_expr (arg);
8450 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8451 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8452 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8456 return NULL_TREE;
8459 /* Fold a call to builtin frexp; we can assume the base is 2. */
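/* For example, frexp (4.0, &e) folds to (*&e = 3, 0.5), since 4.0 is
   0.5 * 2**3 in GCC's normalized representation.  */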
8461 static tree
8462 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8464 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8465 return NULL_TREE;
8467 STRIP_NOPS (arg0);
8469 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8470 return NULL_TREE;
8472 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8474 /* Proceed if a valid pointer type was passed in. */
8475 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8477 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8478 tree frac, exp;
8480 switch (value->cl)
8482 case rvc_zero:
8483 /* For +-0, return (*exp = 0, +-0). */
8484 exp = integer_zero_node;
8485 frac = arg0;
8486 break;
8487 case rvc_nan:
8488 case rvc_inf:
8489 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8490 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8491 case rvc_normal:
8493 /* Since the frexp function always expects base 2, and in
8494 GCC normalized significands are already in the range
8495 [0.5, 1.0), we have exactly what frexp wants. */
8496 REAL_VALUE_TYPE frac_rvt = *value;
8497 SET_REAL_EXP (&frac_rvt, 0);
8498 frac = build_real (rettype, frac_rvt);
8499 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8501 break;
8502 default:
8503 gcc_unreachable ();
8506 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8507 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8508 TREE_SIDE_EFFECTS (arg1) = 1;
8509 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8512 return NULL_TREE;
8515 /* Fold a call to builtin modf. */
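/* For example, modf (3.5, &i) folds to (*&i = 3.0, 0.5).  */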
8517 static tree
8518 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8520 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8521 return NULL_TREE;
8523 STRIP_NOPS (arg0);
8525 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8526 return NULL_TREE;
8528 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8530 /* Proceed if a valid pointer type was passed in. */
8531 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8533 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8534 REAL_VALUE_TYPE trunc, frac;
8536 switch (value->cl)
8538 case rvc_nan:
8539 case rvc_zero:
8540 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8541 trunc = frac = *value;
8542 break;
8543 case rvc_inf:
8544 /* For +-Inf, return (*arg1 = arg0, +-0). */
8545 frac = dconst0;
8546 frac.sign = value->sign;
8547 trunc = *value;
8548 break;
8549 case rvc_normal:
8550 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8551 real_trunc (&trunc, VOIDmode, value);
8552 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8553 /* If the original number was negative and already
8554 integral, then the fractional part is -0.0. */
8555 if (value->sign && frac.cl == rvc_zero)
8556 frac.sign = value->sign;
8557 break;
8560 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8561 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8562 build_real (rettype, trunc));
8563 TREE_SIDE_EFFECTS (arg1) = 1;
8564 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8565 build_real (rettype, frac));
8568 return NULL_TREE;
8571 /* Given a location LOC, an interclass builtin function decl FNDECL
8572 and its single argument ARG, return a folded expression computing
8573 the same, or NULL_TREE if we either couldn't or didn't want to fold
8574 (the latter happens if there's an RTL instruction available). */
8576 static tree
8577 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8579 machine_mode mode;
8581 if (!validate_arg (arg, REAL_TYPE))
8582 return NULL_TREE;
8584 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8585 return NULL_TREE;
8587 mode = TYPE_MODE (TREE_TYPE (arg));
8589 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8591 /* If there is no optab, try generic code. */
8592 switch (DECL_FUNCTION_CODE (fndecl))
8594 tree result;
8596 CASE_FLT_FN (BUILT_IN_ISINF):
8598 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8599 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8600 tree type = TREE_TYPE (arg);
8601 REAL_VALUE_TYPE r;
8602 char buf[128];
8604 if (is_ibm_extended)
8606 /* NaN and Inf are encoded in the high-order double value
8607 only. The low-order value is not significant. */
8608 type = double_type_node;
8609 mode = DFmode;
8610 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8612 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8613 real_from_string (&r, buf);
8614 result = build_call_expr (isgr_fn, 2,
8615 fold_build1_loc (loc, ABS_EXPR, type, arg),
8616 build_real (type, r));
8617 return result;
8619 CASE_FLT_FN (BUILT_IN_FINITE):
8620 case BUILT_IN_ISFINITE:
8622 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8623 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8624 tree type = TREE_TYPE (arg);
8625 REAL_VALUE_TYPE r;
8626 char buf[128];
8628 if (is_ibm_extended)
8630 /* NaN and Inf are encoded in the high-order double value
8631 only. The low-order value is not significant. */
8632 type = double_type_node;
8633 mode = DFmode;
8634 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8636 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8637 real_from_string (&r, buf);
8638 result = build_call_expr (isle_fn, 2,
8639 fold_build1_loc (loc, ABS_EXPR, type, arg),
8640 build_real (type, r));
8641 /*result = fold_build2_loc (loc, UNGT_EXPR,
8642 TREE_TYPE (TREE_TYPE (fndecl)),
8643 fold_build1_loc (loc, ABS_EXPR, type, arg),
8644 build_real (type, r));
8645 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8646 TREE_TYPE (TREE_TYPE (fndecl)),
8647 result);*/
8648 return result;
8650 case BUILT_IN_ISNORMAL:
8652 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8653 islessequal(fabs(x),DBL_MAX). */
8654 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8655 tree type = TREE_TYPE (arg);
8656 tree orig_arg, max_exp, min_exp;
8657 machine_mode orig_mode = mode;
8658 REAL_VALUE_TYPE rmax, rmin;
8659 char buf[128];
8661 orig_arg = arg = builtin_save_expr (arg);
8662 if (is_ibm_extended)
8664 /* Use double to test the normal range of IBM extended
8665 precision. Emin for IBM extended precision is
8666 different to emin for IEEE double, being 53 higher
8667 since the low double exponent is at least 53 lower
8668 than the high double exponent. */
8669 type = double_type_node;
8670 mode = DFmode;
8671 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8673 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8675 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8676 real_from_string (&rmax, buf);
8677 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8678 real_from_string (&rmin, buf);
8679 max_exp = build_real (type, rmax);
8680 min_exp = build_real (type, rmin);
8682 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8683 if (is_ibm_extended)
8685 /* Testing the high end of the range is done just using
8686 the high double, using the same test as isfinite().
8687 For the subnormal end of the range we first test the
8688 high double, then if its magnitude is equal to the
8689 limit of 0x1p-969, we test whether the low double is
8690 non-zero and opposite sign to the high double. */
8691 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8692 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8693 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8694 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8695 arg, min_exp);
8696 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8697 complex_double_type_node, orig_arg);
8698 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8699 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8700 tree zero = build_real (type, dconst0);
8701 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8702 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8703 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8704 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8705 fold_build3 (COND_EXPR,
8706 integer_type_node,
8707 hilt, logt, lolt));
8708 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8709 eq_min, ok_lo);
8710 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8711 gt_min, eq_min);
8713 else
8715 tree const isge_fn
8716 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8717 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8719 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8720 max_exp, min_exp);
8721 return result;
8723 default:
8724 break;
8727 return NULL_TREE;
8730 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8731 ARG is the argument for the call. */
8733 static tree
8734 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8736 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8738 if (!validate_arg (arg, REAL_TYPE))
8739 return NULL_TREE;
8741 switch (builtin_index)
8743 case BUILT_IN_ISINF:
8744 if (tree_expr_infinite_p (arg))
8745 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8746 if (!tree_expr_maybe_infinite_p (arg))
8747 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8748 return NULL_TREE;
8750 case BUILT_IN_ISINF_SIGN:
8752 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8753 /* In a boolean context, GCC will fold the inner COND_EXPR to
8754 1. So e.g. "if (isinf_sign(x))" would be folded to just
8755 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8756 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8757 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8758 tree tmp = NULL_TREE;
8760 arg = builtin_save_expr (arg);
8762 if (signbit_fn && isinf_fn)
8764 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8765 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8767 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8768 signbit_call, integer_zero_node);
8769 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8770 isinf_call, integer_zero_node);
8772 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8773 integer_minus_one_node, integer_one_node);
8774 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8775 isinf_call, tmp,
8776 integer_zero_node);
8779 return tmp;
8782 case BUILT_IN_ISFINITE:
8783 if (tree_expr_finite_p (arg))
8784 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8785 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8786 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8787 return NULL_TREE;
8789 case BUILT_IN_ISNAN:
8790 if (tree_expr_nan_p (arg))
8791 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8792 if (!tree_expr_maybe_nan_p (arg))
8793 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8796 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8797 if (is_ibm_extended)
8799 /* NaN and Inf are encoded in the high-order double value
8800 only. The low-order value is not significant. */
8801 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8804 arg = builtin_save_expr (arg);
8805 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8807 default:
8808 gcc_unreachable ();
8812 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8813 This builtin will generate code to return the appropriate floating
8814 point classification depending on the value of the floating point
8815 number passed in. The possible return values must be supplied as
8816 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8817 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8818 one floating point argument which is "type generic". */
8820 static tree
8821 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8823 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8824 arg, type, res, tmp;
8825 machine_mode mode;
8826 REAL_VALUE_TYPE r;
8827 char buf[128];
8829 /* Verify the required arguments in the original call. */
8830 if (nargs != 6
8831 || !validate_arg (args[0], INTEGER_TYPE)
8832 || !validate_arg (args[1], INTEGER_TYPE)
8833 || !validate_arg (args[2], INTEGER_TYPE)
8834 || !validate_arg (args[3], INTEGER_TYPE)
8835 || !validate_arg (args[4], INTEGER_TYPE)
8836 || !validate_arg (args[5], REAL_TYPE))
8837 return NULL_TREE;
8839 fp_nan = args[0];
8840 fp_infinite = args[1];
8841 fp_normal = args[2];
8842 fp_subnormal = args[3];
8843 fp_zero = args[4];
8844 arg = args[5];
8845 type = TREE_TYPE (arg);
8846 mode = TYPE_MODE (type);
8847 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8849 /* fpclassify(x) ->
8850 isnan(x) ? FP_NAN :
8851 (fabs(x) == Inf ? FP_INFINITE :
8852 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8853 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8855 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8856 build_real (type, dconst0));
8857 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8858 tmp, fp_zero, fp_subnormal);
8860 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8861 real_from_string (&r, buf);
8862 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8863 arg, build_real (type, r));
8864 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8866 if (tree_expr_maybe_infinite_p (arg))
8868 real_inf (&r);
8869 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8870 build_real (type, r));
8871 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8872 fp_infinite, res);
8875 if (tree_expr_maybe_nan_p (arg))
8877 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8878 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8881 return res;
8884 /* Fold a call to an unordered comparison function such as
8885 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8886 being called and ARG0 and ARG1 are the arguments for the call.
8887 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8888 the opposite of the desired result. UNORDERED_CODE is used
8889 for modes that can hold NaNs and ORDERED_CODE is used for
8890 the rest. */
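/* Illustrative sketch (assumed example): with the comparison codes that
   fold_builtin_2 passes in below, isgreater (x, y) is folded to

     !(x UNLE_EXPR y)     when either operand may be a NaN
     !(x LE_EXPR y)       otherwise

   i.e. a TRUTH_NOT_EXPR wrapped around the opposite comparison, so a
   quiet NaN operand does not raise an "invalid" exception. */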
8892 static tree
8893 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8894 enum tree_code unordered_code,
8895 enum tree_code ordered_code)
8897 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8898 enum tree_code code;
8899 tree type0, type1;
8900 enum tree_code code0, code1;
8901 tree cmp_type = NULL_TREE;
8903 type0 = TREE_TYPE (arg0);
8904 type1 = TREE_TYPE (arg1);
8906 code0 = TREE_CODE (type0);
8907 code1 = TREE_CODE (type1);
8909 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8910 /* Choose the wider of two real types. */
8911 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8912 ? type0 : type1;
8913 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8914 cmp_type = type0;
8915 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8916 cmp_type = type1;
8918 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8919 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8921 if (unordered_code == UNORDERED_EXPR)
8923 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
8924 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
8925 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
8926 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8927 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8930 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
8931 ? unordered_code : ordered_code;
8932 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8933 fold_build2_loc (loc, code, type, arg0, arg1));
8936 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8937 arithmetics if it can never overflow, or into internal functions that
8938 return both result of arithmetics and overflowed boolean flag in
8939 a complex integer result, or some other check for overflow.
8940 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8941 checking part of that. */
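/* Illustrative sketch (assumed example, GIMPLE-dump-style notation):
   when the operands are not both constant, a call such as

     bool ovf = __builtin_add_overflow (a, b, &res);

   is folded below into roughly

     _tmp = .ADD_OVERFLOW (a, b);               complex integer result
     res = REALPART_EXPR <_tmp>;                arithmetic value
     ovf = (_Bool) IMAGPART_EXPR <_tmp>;        overflow flag

   while __builtin_add_overflow_p keeps only the overflow flag and
   merely evaluates its third argument. */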
8943 static tree
8944 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8945 tree arg0, tree arg1, tree arg2)
8947 enum internal_fn ifn = IFN_LAST;
8948 /* The code of the expression corresponding to the built-in. */
8949 enum tree_code opcode = ERROR_MARK;
8950 bool ovf_only = false;
8952 switch (fcode)
8954 case BUILT_IN_ADD_OVERFLOW_P:
8955 ovf_only = true;
8956 /* FALLTHRU */
8957 case BUILT_IN_ADD_OVERFLOW:
8958 case BUILT_IN_SADD_OVERFLOW:
8959 case BUILT_IN_SADDL_OVERFLOW:
8960 case BUILT_IN_SADDLL_OVERFLOW:
8961 case BUILT_IN_UADD_OVERFLOW:
8962 case BUILT_IN_UADDL_OVERFLOW:
8963 case BUILT_IN_UADDLL_OVERFLOW:
8964 opcode = PLUS_EXPR;
8965 ifn = IFN_ADD_OVERFLOW;
8966 break;
8967 case BUILT_IN_SUB_OVERFLOW_P:
8968 ovf_only = true;
8969 /* FALLTHRU */
8970 case BUILT_IN_SUB_OVERFLOW:
8971 case BUILT_IN_SSUB_OVERFLOW:
8972 case BUILT_IN_SSUBL_OVERFLOW:
8973 case BUILT_IN_SSUBLL_OVERFLOW:
8974 case BUILT_IN_USUB_OVERFLOW:
8975 case BUILT_IN_USUBL_OVERFLOW:
8976 case BUILT_IN_USUBLL_OVERFLOW:
8977 opcode = MINUS_EXPR;
8978 ifn = IFN_SUB_OVERFLOW;
8979 break;
8980 case BUILT_IN_MUL_OVERFLOW_P:
8981 ovf_only = true;
8982 /* FALLTHRU */
8983 case BUILT_IN_MUL_OVERFLOW:
8984 case BUILT_IN_SMUL_OVERFLOW:
8985 case BUILT_IN_SMULL_OVERFLOW:
8986 case BUILT_IN_SMULLL_OVERFLOW:
8987 case BUILT_IN_UMUL_OVERFLOW:
8988 case BUILT_IN_UMULL_OVERFLOW:
8989 case BUILT_IN_UMULLL_OVERFLOW:
8990 opcode = MULT_EXPR;
8991 ifn = IFN_MUL_OVERFLOW;
8992 break;
8993 default:
8994 gcc_unreachable ();
8997 /* For the "generic" overloads, the first two arguments can have different
8998 types and the last argument determines the target type to use to check
8999 for overflow. The arguments of the other overloads all have the same
9000 type. */
9001 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9003 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9004 arguments are constant, attempt to fold the built-in call into a constant
9005 expression indicating whether or not it detected an overflow. */
9006 if (ovf_only
9007 && TREE_CODE (arg0) == INTEGER_CST
9008 && TREE_CODE (arg1) == INTEGER_CST)
9009 /* Perform the computation in the target type and check for overflow. */
9010 return omit_one_operand_loc (loc, boolean_type_node,
9011 arith_overflowed_p (opcode, type, arg0, arg1)
9012 ? boolean_true_node : boolean_false_node,
9013 arg2);
9015 tree intres, ovfres;
9016 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9018 intres = fold_binary_loc (loc, opcode, type,
9019 fold_convert_loc (loc, type, arg0),
9020 fold_convert_loc (loc, type, arg1));
9021 if (TREE_OVERFLOW (intres))
9022 intres = drop_tree_overflow (intres);
9023 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9024 ? boolean_true_node : boolean_false_node);
9026 else
9028 tree ctype = build_complex_type (type);
9029 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9030 arg0, arg1);
9031 tree tgt = save_expr (call);
9032 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9033 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9034 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9037 if (ovf_only)
9038 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9040 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9041 tree store
9042 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9043 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9046 /* Fold a call to __builtin_FILE to a constant string. */
9048 static inline tree
9049 fold_builtin_FILE (location_t loc)
9051 if (const char *fname = LOCATION_FILE (loc))
9053 /* The documentation says this builtin is equivalent to the preprocessor
9054 __FILE__ macro so it appears appropriate to use the same file prefix
9055 mappings. */
9056 fname = remap_macro_filename (fname);
9057 return build_string_literal (strlen (fname) + 1, fname);
9060 return build_string_literal (1, "");
9063 /* Fold a call to __builtin_FUNCTION to a constant string. */
9065 static inline tree
9066 fold_builtin_FUNCTION ()
9068 const char *name = "";
9070 if (current_function_decl)
9071 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9073 return build_string_literal (strlen (name) + 1, name);
9076 /* Fold a call to __builtin_LINE to an integer constant. */
9078 static inline tree
9079 fold_builtin_LINE (location_t loc, tree type)
9081 return build_int_cst (type, LOCATION_LINE (loc));
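/* Illustrative sketch (assumed example): inside

     void
     report (void)
     {
       log_failure (__builtin_FILE (), __builtin_LINE (),
                    __builtin_FUNCTION ());
     }

   the three helpers above fold the calls to the enclosing file name,
   the line number of the call and the string "report"; log_failure is
   a hypothetical user function. */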
9084 /* Fold a call to built-in function FNDECL with 0 arguments.
9085 This function returns NULL_TREE if no simplification was possible. */
9087 static tree
9088 fold_builtin_0 (location_t loc, tree fndecl)
9090 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9091 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9092 switch (fcode)
9094 case BUILT_IN_FILE:
9095 return fold_builtin_FILE (loc);
9097 case BUILT_IN_FUNCTION:
9098 return fold_builtin_FUNCTION ();
9100 case BUILT_IN_LINE:
9101 return fold_builtin_LINE (loc, type);
9103 CASE_FLT_FN (BUILT_IN_INF):
9104 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9105 case BUILT_IN_INFD32:
9106 case BUILT_IN_INFD64:
9107 case BUILT_IN_INFD128:
9108 return fold_builtin_inf (loc, type, true);
9110 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9111 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9112 return fold_builtin_inf (loc, type, false);
9114 case BUILT_IN_CLASSIFY_TYPE:
9115 return fold_builtin_classify_type (NULL_TREE);
9117 default:
9118 break;
9120 return NULL_TREE;
9123 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9124 This function returns NULL_TREE if no simplification was possible. */
9126 static tree
9127 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9129 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9130 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9132 if (TREE_CODE (arg0) == ERROR_MARK)
9133 return NULL_TREE;
9135 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9136 return ret;
9138 switch (fcode)
9140 case BUILT_IN_CONSTANT_P:
9142 tree val = fold_builtin_constant_p (arg0);
9144 /* Gimplification will pull the CALL_EXPR for the builtin out of
9145 an if condition. When not optimizing, we'll not CSE it back.
9146 To avoid link error types of regressions, return false now. */
9147 if (!val && !optimize)
9148 val = integer_zero_node;
9150 return val;
9153 case BUILT_IN_CLASSIFY_TYPE:
9154 return fold_builtin_classify_type (arg0);
9156 case BUILT_IN_STRLEN:
9157 return fold_builtin_strlen (loc, expr, type, arg0);
9159 CASE_FLT_FN (BUILT_IN_FABS):
9160 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9161 case BUILT_IN_FABSD32:
9162 case BUILT_IN_FABSD64:
9163 case BUILT_IN_FABSD128:
9164 return fold_builtin_fabs (loc, arg0, type);
9166 case BUILT_IN_ABS:
9167 case BUILT_IN_LABS:
9168 case BUILT_IN_LLABS:
9169 case BUILT_IN_IMAXABS:
9170 return fold_builtin_abs (loc, arg0, type);
9172 CASE_FLT_FN (BUILT_IN_CONJ):
9173 if (validate_arg (arg0, COMPLEX_TYPE)
9174 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9175 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9176 break;
9178 CASE_FLT_FN (BUILT_IN_CREAL):
9179 if (validate_arg (arg0, COMPLEX_TYPE)
9180 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9181 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9182 break;
9184 CASE_FLT_FN (BUILT_IN_CIMAG):
9185 if (validate_arg (arg0, COMPLEX_TYPE)
9186 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9187 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9188 break;
9190 CASE_FLT_FN (BUILT_IN_CARG):
9191 return fold_builtin_carg (loc, arg0, type);
9193 case BUILT_IN_ISASCII:
9194 return fold_builtin_isascii (loc, arg0);
9196 case BUILT_IN_TOASCII:
9197 return fold_builtin_toascii (loc, arg0);
9199 case BUILT_IN_ISDIGIT:
9200 return fold_builtin_isdigit (loc, arg0);
9202 CASE_FLT_FN (BUILT_IN_FINITE):
9203 case BUILT_IN_FINITED32:
9204 case BUILT_IN_FINITED64:
9205 case BUILT_IN_FINITED128:
9206 case BUILT_IN_ISFINITE:
9208 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9209 if (ret)
9210 return ret;
9211 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9214 CASE_FLT_FN (BUILT_IN_ISINF):
9215 case BUILT_IN_ISINFD32:
9216 case BUILT_IN_ISINFD64:
9217 case BUILT_IN_ISINFD128:
9219 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9220 if (ret)
9221 return ret;
9222 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9225 case BUILT_IN_ISNORMAL:
9226 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9228 case BUILT_IN_ISINF_SIGN:
9229 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9231 CASE_FLT_FN (BUILT_IN_ISNAN):
9232 case BUILT_IN_ISNAND32:
9233 case BUILT_IN_ISNAND64:
9234 case BUILT_IN_ISNAND128:
9235 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9237 case BUILT_IN_FREE:
9238 if (integer_zerop (arg0))
9239 return build_empty_stmt (loc);
9240 break;
9242 default:
9243 break;
9246 return NULL_TREE;
9250 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9251 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9252 if no simplification was possible. */
9254 static tree
9255 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9257 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9258 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9260 if (TREE_CODE (arg0) == ERROR_MARK
9261 || TREE_CODE (arg1) == ERROR_MARK)
9262 return NULL_TREE;
9264 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9265 return ret;
9267 switch (fcode)
9269 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9270 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9271 if (validate_arg (arg0, REAL_TYPE)
9272 && validate_arg (arg1, POINTER_TYPE))
9273 return do_mpfr_lgamma_r (arg0, arg1, type);
9274 break;
9276 CASE_FLT_FN (BUILT_IN_FREXP):
9277 return fold_builtin_frexp (loc, arg0, arg1, type);
9279 CASE_FLT_FN (BUILT_IN_MODF):
9280 return fold_builtin_modf (loc, arg0, arg1, type);
9282 case BUILT_IN_STRSPN:
9283 return fold_builtin_strspn (loc, expr, arg0, arg1);
9285 case BUILT_IN_STRCSPN:
9286 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9288 case BUILT_IN_STRPBRK:
9289 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9291 case BUILT_IN_EXPECT:
9292 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9294 case BUILT_IN_ISGREATER:
9295 return fold_builtin_unordered_cmp (loc, fndecl,
9296 arg0, arg1, UNLE_EXPR, LE_EXPR);
9297 case BUILT_IN_ISGREATEREQUAL:
9298 return fold_builtin_unordered_cmp (loc, fndecl,
9299 arg0, arg1, UNLT_EXPR, LT_EXPR);
9300 case BUILT_IN_ISLESS:
9301 return fold_builtin_unordered_cmp (loc, fndecl,
9302 arg0, arg1, UNGE_EXPR, GE_EXPR);
9303 case BUILT_IN_ISLESSEQUAL:
9304 return fold_builtin_unordered_cmp (loc, fndecl,
9305 arg0, arg1, UNGT_EXPR, GT_EXPR);
9306 case BUILT_IN_ISLESSGREATER:
9307 return fold_builtin_unordered_cmp (loc, fndecl,
9308 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9309 case BUILT_IN_ISUNORDERED:
9310 return fold_builtin_unordered_cmp (loc, fndecl,
9311 arg0, arg1, UNORDERED_EXPR,
9312 NOP_EXPR);
9314 /* We do the folding for va_start in the expander. */
9315 case BUILT_IN_VA_START:
9316 break;
9318 case BUILT_IN_OBJECT_SIZE:
9319 return fold_builtin_object_size (arg0, arg1);
9321 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9322 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9324 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9325 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9327 default:
9328 break;
9330 return NULL_TREE;
9333 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9334 and ARG2.
9335 This function returns NULL_TREE if no simplification was possible. */
9337 static tree
9338 fold_builtin_3 (location_t loc, tree fndecl,
9339 tree arg0, tree arg1, tree arg2)
9341 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9342 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9344 if (TREE_CODE (arg0) == ERROR_MARK
9345 || TREE_CODE (arg1) == ERROR_MARK
9346 || TREE_CODE (arg2) == ERROR_MARK)
9347 return NULL_TREE;
9349 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9350 arg0, arg1, arg2))
9351 return ret;
9353 switch (fcode)
9356 CASE_FLT_FN (BUILT_IN_SINCOS):
9357 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9359 CASE_FLT_FN (BUILT_IN_REMQUO):
9360 if (validate_arg (arg0, REAL_TYPE)
9361 && validate_arg (arg1, REAL_TYPE)
9362 && validate_arg (arg2, POINTER_TYPE))
9363 return do_mpfr_remquo (arg0, arg1, arg2);
9364 break;
9366 case BUILT_IN_MEMCMP:
9367 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9369 case BUILT_IN_EXPECT:
9370 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9372 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9373 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9375 case BUILT_IN_ADD_OVERFLOW:
9376 case BUILT_IN_SUB_OVERFLOW:
9377 case BUILT_IN_MUL_OVERFLOW:
9378 case BUILT_IN_ADD_OVERFLOW_P:
9379 case BUILT_IN_SUB_OVERFLOW_P:
9380 case BUILT_IN_MUL_OVERFLOW_P:
9381 case BUILT_IN_SADD_OVERFLOW:
9382 case BUILT_IN_SADDL_OVERFLOW:
9383 case BUILT_IN_SADDLL_OVERFLOW:
9384 case BUILT_IN_SSUB_OVERFLOW:
9385 case BUILT_IN_SSUBL_OVERFLOW:
9386 case BUILT_IN_SSUBLL_OVERFLOW:
9387 case BUILT_IN_SMUL_OVERFLOW:
9388 case BUILT_IN_SMULL_OVERFLOW:
9389 case BUILT_IN_SMULLL_OVERFLOW:
9390 case BUILT_IN_UADD_OVERFLOW:
9391 case BUILT_IN_UADDL_OVERFLOW:
9392 case BUILT_IN_UADDLL_OVERFLOW:
9393 case BUILT_IN_USUB_OVERFLOW:
9394 case BUILT_IN_USUBL_OVERFLOW:
9395 case BUILT_IN_USUBLL_OVERFLOW:
9396 case BUILT_IN_UMUL_OVERFLOW:
9397 case BUILT_IN_UMULL_OVERFLOW:
9398 case BUILT_IN_UMULLL_OVERFLOW:
9399 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9401 default:
9402 break;
9404 return NULL_TREE;
9407 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9408 ARGS is an array of NARGS arguments. IGNORE is true if the result
9409 of the function call is ignored. This function returns NULL_TREE
9410 if no simplification was possible. */
9412 static tree
9413 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9414 int nargs, bool)
9416 tree ret = NULL_TREE;
9418 switch (nargs)
9420 case 0:
9421 ret = fold_builtin_0 (loc, fndecl);
9422 break;
9423 case 1:
9424 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9425 break;
9426 case 2:
9427 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9428 break;
9429 case 3:
9430 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9431 break;
9432 default:
9433 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9434 break;
9436 if (ret)
9438 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9439 SET_EXPR_LOCATION (ret, loc);
9440 return ret;
9442 return NULL_TREE;
9445 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9446 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9447 of arguments in ARGS to be omitted. OLDNARGS is the number of
9448 elements in ARGS. */
9450 static tree
9451 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9452 int skip, tree fndecl, int n, va_list newargs)
9454 int nargs = oldnargs - skip + n;
9455 tree *buffer;
9457 if (n > 0)
9459 int i, j;
9461 buffer = XALLOCAVEC (tree, nargs);
9462 for (i = 0; i < n; i++)
9463 buffer[i] = va_arg (newargs, tree);
9464 for (j = skip; j < oldnargs; j++, i++)
9465 buffer[i] = args[j];
9467 else
9468 buffer = args + skip;
9470 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9473 /* Return true if FNDECL shouldn't be folded right now.
9474 If a built-in function has an always_inline wrapper, defer
9475 folding it until after always_inline functions have
9476 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9477 might not be performed. */
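/* Illustrative sketch of such a wrapper (assumed, glibc-style and
   simplified, not part of GCC):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding the outer strcpy call before this wrapper is inlined would
   bypass the object-size check, hence the deferral implemented below. */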
9479 bool
9480 avoid_folding_inline_builtin (tree fndecl)
9482 return (DECL_DECLARED_INLINE_P (fndecl)
9483 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9484 && cfun
9485 && !cfun->always_inline_functions_inlined
9486 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9489 /* A wrapper function for builtin folding that prevents warnings for
9490 "statement without effect" and the like, caused by removing the
9491 call node earlier than the warning is generated. */
9493 tree
9494 fold_call_expr (location_t loc, tree exp, bool ignore)
9496 tree ret = NULL_TREE;
9497 tree fndecl = get_callee_fndecl (exp);
9498 if (fndecl && fndecl_built_in_p (fndecl)
9499 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9500 yet. Defer folding until we see all the arguments
9501 (after inlining). */
9502 && !CALL_EXPR_VA_ARG_PACK (exp))
9504 int nargs = call_expr_nargs (exp);
9506 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9507 instead last argument is __builtin_va_arg_pack (). Defer folding
9508 even in that case, until arguments are finalized. */
9509 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9511 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9512 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9513 return NULL_TREE;
9516 if (avoid_folding_inline_builtin (fndecl))
9517 return NULL_TREE;
9519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9520 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9521 CALL_EXPR_ARGP (exp), ignore);
9522 else
9524 tree *args = CALL_EXPR_ARGP (exp);
9525 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9526 if (ret)
9527 return ret;
9530 return NULL_TREE;
9533 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9534 N arguments are passed in the array ARGARRAY. Return a folded
9535 expression or NULL_TREE if no simplification was possible. */
9537 tree
9538 fold_builtin_call_array (location_t loc, tree,
9539 tree fn,
9540 int n,
9541 tree *argarray)
9543 if (TREE_CODE (fn) != ADDR_EXPR)
9544 return NULL_TREE;
9546 tree fndecl = TREE_OPERAND (fn, 0);
9547 if (TREE_CODE (fndecl) == FUNCTION_DECL
9548 && fndecl_built_in_p (fndecl))
9550 /* If last argument is __builtin_va_arg_pack (), arguments to this
9551 function are not finalized yet. Defer folding until they are. */
9552 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9554 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9555 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9556 return NULL_TREE;
9558 if (avoid_folding_inline_builtin (fndecl))
9559 return NULL_TREE;
9560 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9561 return targetm.fold_builtin (fndecl, n, argarray, false);
9562 else
9563 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9566 return NULL_TREE;
9569 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9570 along with N new arguments specified as the "..." parameters. SKIP
9571 is the number of arguments in EXP to be omitted. This function is used
9572 to do varargs-to-varargs transformations. */
9574 static tree
9575 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9577 va_list ap;
9578 tree t;
9580 va_start (ap, n);
9581 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9582 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9583 va_end (ap);
9585 return t;
9588 /* Validate a single argument ARG against a tree code CODE representing
9589 a type. Return true when argument is valid. */
9591 static bool
9592 validate_arg (const_tree arg, enum tree_code code)
9594 if (!arg)
9595 return false;
9596 else if (code == POINTER_TYPE)
9597 return POINTER_TYPE_P (TREE_TYPE (arg));
9598 else if (code == INTEGER_TYPE)
9599 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9600 return code == TREE_CODE (TREE_TYPE (arg));
9603 /* This function validates the types of a function call argument list
9604 against a specified list of tree_codes. If the last specifier is a 0,
9605 that represents an ellipsis, otherwise the last specifier must be a
9606 VOID_TYPE.
9608 This is the GIMPLE version of validate_arglist. Eventually we want to
9609 completely convert builtins.c to work from GIMPLEs and the tree based
9610 validate_arglist will then be removed. */
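/* Illustrative sketch (assumed caller, not from this excerpt): a
   builtin folder would typically verify its call as

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                   VOID_TYPE))
       return false;

   where the trailing VOID_TYPE terminates the list and a 0 in its
   place would instead accept any further arguments. */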
9612 bool
9613 validate_gimple_arglist (const gcall *call, ...)
9615 enum tree_code code;
9616 bool res = 0;
9617 va_list ap;
9618 const_tree arg;
9619 size_t i;
9621 va_start (ap, call);
9622 i = 0;
9626 code = (enum tree_code) va_arg (ap, int);
9627 switch (code)
9629 case 0:
9630 /* This signifies an ellipsis; any further arguments are all ok. */
9631 res = true;
9632 goto end;
9633 case VOID_TYPE:
9634 /* This signifies an endlink; if no arguments remain, return
9635 true, otherwise return false. */
9636 res = (i == gimple_call_num_args (call));
9637 goto end;
9638 default:
9639 /* If no parameters remain or the parameter's code does not
9640 match the specified code, return false. Otherwise continue
9641 checking any remaining arguments. */
9642 arg = gimple_call_arg (call, i++);
9643 if (!validate_arg (arg, code))
9644 goto end;
9645 break;
9648 while (1);
9650 /* We need gotos here since we can only have one VA_CLOSE in a
9651 function. */
9652 end: ;
9653 va_end (ap);
9655 return res;
9658 /* Default target-specific builtin expander that does nothing. */
9660 rtx
9661 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9662 rtx target ATTRIBUTE_UNUSED,
9663 rtx subtarget ATTRIBUTE_UNUSED,
9664 machine_mode mode ATTRIBUTE_UNUSED,
9665 int ignore ATTRIBUTE_UNUSED)
9667 return NULL_RTX;
9670 /* Returns true if EXP represents data that would potentially reside
9671 in a readonly section. */
9673 bool
9674 readonly_data_expr (tree exp)
9676 STRIP_NOPS (exp);
9678 if (TREE_CODE (exp) != ADDR_EXPR)
9679 return false;
9681 exp = get_base_address (TREE_OPERAND (exp, 0));
9682 if (!exp)
9683 return false;
9685 /* Make sure we call decl_readonly_section only for trees it
9686 can handle (since it returns true for everything it doesn't
9687 understand). */
9688 if (TREE_CODE (exp) == STRING_CST
9689 || TREE_CODE (exp) == CONSTRUCTOR
9690 || (VAR_P (exp) && TREE_STATIC (exp)))
9691 return decl_readonly_section (exp, 0);
9692 else
9693 return false;
9696 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9697 to the call, and TYPE is its return type.
9699 Return NULL_TREE if no simplification was possible, otherwise return the
9700 simplified form of the call as a tree.
9702 The simplified form may be a constant or other expression which
9703 computes the same value, but in a more efficient manner (including
9704 calls to other builtin functions).
9706 The call may contain arguments which need to be evaluated, but
9707 which are not useful to determine the result of the call. In
9708 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9709 COMPOUND_EXPR will be an argument which must be evaluated.
9710 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9711 COMPOUND_EXPR in the chain will contain the tree for the simplified
9712 form of the builtin function call. */
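/* Illustrative examples of the simplifications below (assumed, not
   exhaustive):

     strpbrk (s, "")       ->  NULL, with s still evaluated
     strpbrk (s, "x")      ->  strchr (s, 'x')
     strpbrk ("ab", "b")   ->  offset 1 into the string literal

   Any other form is left as a real strpbrk call. */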
9714 static tree
9715 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9717 if (!validate_arg (s1, POINTER_TYPE)
9718 || !validate_arg (s2, POINTER_TYPE))
9719 return NULL_TREE;
9721 tree fn;
9722 const char *p1, *p2;
9724 p2 = c_getstr (s2);
9725 if (p2 == NULL)
9726 return NULL_TREE;
9728 p1 = c_getstr (s1);
9729 if (p1 != NULL)
9731 const char *r = strpbrk (p1, p2);
9732 tree tem;
9734 if (r == NULL)
9735 return build_int_cst (TREE_TYPE (s1), 0);
9737 /* Return an offset into the constant string argument. */
9738 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9739 return fold_convert_loc (loc, type, tem);
9742 if (p2[0] == '\0')
9743 /* strpbrk(x, "") == NULL.
9744 Evaluate and ignore s1 in case it had side-effects. */
9745 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9747 if (p2[1] != '\0')
9748 return NULL_TREE; /* Really call strpbrk. */
9750 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9751 if (!fn)
9752 return NULL_TREE;
9754 /* New argument list transforming strpbrk(s1, s2) to
9755 strchr(s1, s2[0]). */
9756 return build_call_expr_loc (loc, fn, 2, s1,
9757 build_int_cst (integer_type_node, p2[0]));
9760 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9761 to the call.
9763 Return NULL_TREE if no simplification was possible, otherwise return the
9764 simplified form of the call as a tree.
9766 The simplified form may be a constant or other expression which
9767 computes the same value, but in a more efficient manner (including
9768 calls to other builtin functions).
9770 The call may contain arguments which need to be evaluated, but
9771 which are not useful to determine the result of the call. In
9772 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9773 COMPOUND_EXPR will be an argument which must be evaluated.
9774 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9775 COMPOUND_EXPR in the chain will contain the tree for the simplified
9776 form of the builtin function call. */
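/* Illustrative examples of the folding below (assumed, not exhaustive):

     strspn ("", s)   ->  0, with s still evaluated
     strspn (s, "")   ->  0, with s still evaluated

   Any other form is left as a real strspn call. */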
9778 static tree
9779 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9781 if (!validate_arg (s1, POINTER_TYPE)
9782 || !validate_arg (s2, POINTER_TYPE))
9783 return NULL_TREE;
9785 if (!check_nul_terminated_array (expr, s1)
9786 || !check_nul_terminated_array (expr, s2))
9787 return NULL_TREE;
9789 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9791 /* If either argument is "", return NULL_TREE. */
9792 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9793 /* Evaluate and ignore both arguments in case either one has
9794 side-effects. */
9795 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9796 s1, s2);
9797 return NULL_TREE;
9800 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9801 to the call.
9803 Return NULL_TREE if no simplification was possible, otherwise return the
9804 simplified form of the call as a tree.
9806 The simplified form may be a constant or other expression which
9807 computes the same value, but in a more efficient manner (including
9808 calls to other builtin functions).
9810 The call may contain arguments which need to be evaluated, but
9811 which are not useful to determine the result of the call. In
9812 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9813 COMPOUND_EXPR will be an argument which must be evaluated.
9814 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9815 COMPOUND_EXPR in the chain will contain the tree for the simplified
9816 form of the builtin function call. */
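/* Illustrative examples of the folding below (assumed, not exhaustive):

     strcspn ("", s)  ->  0, with s still evaluated
     strcspn (s, "")  ->  __builtin_strlen (s)

   Any other form is left as a real strcspn call. */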
9818 static tree
9819 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9821 if (!validate_arg (s1, POINTER_TYPE)
9822 || !validate_arg (s2, POINTER_TYPE))
9823 return NULL_TREE;
9825 if (!check_nul_terminated_array (expr, s1)
9826 || !check_nul_terminated_array (expr, s2))
9827 return NULL_TREE;
9829 /* If the first argument is "", return NULL_TREE. */
9830 const char *p1 = c_getstr (s1);
9831 if (p1 && *p1 == '\0')
9833 /* Evaluate and ignore argument s2 in case it has
9834 side-effects. */
9835 return omit_one_operand_loc (loc, size_type_node,
9836 size_zero_node, s2);
9839 /* If the second argument is "", return __builtin_strlen(s1). */
9840 const char *p2 = c_getstr (s2);
9841 if (p2 && *p2 == '\0')
9843 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9845 /* If the replacement _DECL isn't initialized, don't do the
9846 transformation. */
9847 if (!fn)
9848 return NULL_TREE;
9850 return build_call_expr_loc (loc, fn, 1, s1);
9852 return NULL_TREE;
9855 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9856 produced, false otherwise. This is done so that we don't output the error
9857 or warning two or three times. */
9859 bool
9860 fold_builtin_next_arg (tree exp, bool va_start_p)
9862 tree fntype = TREE_TYPE (current_function_decl);
9863 int nargs = call_expr_nargs (exp);
9864 tree arg;
9865 /* There is a good chance the current input_location points inside the
9866 definition of the va_start macro (perhaps on the token for
9867 builtin) in a system header, so warnings will not be emitted.
9868 Use the location in real source code. */
9869 location_t current_location =
9870 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9871 NULL);
9873 if (!stdarg_p (fntype))
9875 error ("%<va_start%> used in function with fixed arguments");
9876 return true;
9879 if (va_start_p)
9881 if (va_start_p && (nargs != 2))
9883 error ("wrong number of arguments to function %<va_start%>");
9884 return true;
9886 arg = CALL_EXPR_ARG (exp, 1);
9888 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9889 when we checked the arguments and if needed issued a warning. */
9890 else
9892 if (nargs == 0)
9894 /* Evidently an out of date version of <stdarg.h>; can't validate
9895 va_start's second argument, but can still work as intended. */
9896 warning_at (current_location,
9897 OPT_Wvarargs,
9898 "%<__builtin_next_arg%> called without an argument");
9899 return true;
9901 else if (nargs > 1)
9903 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9904 return true;
9906 arg = CALL_EXPR_ARG (exp, 0);
9909 if (TREE_CODE (arg) == SSA_NAME
9910 && SSA_NAME_VAR (arg))
9911 arg = SSA_NAME_VAR (arg);
9913 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9914 or __builtin_next_arg (0) the first time we see it, after checking
9915 the arguments and if needed issuing a warning. */
9916 if (!integer_zerop (arg))
9918 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9920 /* Strip off all nops for the sake of the comparison. This
9921 is not quite the same as STRIP_NOPS. It does more.
9922 We must also strip off INDIRECT_EXPR for C++ reference
9923 parameters. */
9924 while (CONVERT_EXPR_P (arg)
9925 || TREE_CODE (arg) == INDIRECT_REF)
9926 arg = TREE_OPERAND (arg, 0);
9927 if (arg != last_parm)
9929 /* FIXME: Sometimes with the tree optimizers we can end up with
9930 something other than the last named argument even though the user
9931 passed the last one. We just warn and carry on as if it were the
9932 last argument, so wrong code may be generated because of it. */
9934 warning_at (current_location,
9935 OPT_Wvarargs,
9936 "second parameter of %<va_start%> not last named argument");
9939 /* Undefined by C99 7.15.1.4p4 (va_start):
9940 "If the parameter parmN is declared with the register storage
9941 class, with a function or array type, or with a type that is
9942 not compatible with the type that results after application of
9943 the default argument promotions, the behavior is undefined." */
9945 else if (DECL_REGISTER (arg))
9947 warning_at (current_location,
9948 OPT_Wvarargs,
9949 "undefined behavior when second parameter of "
9950 "%<va_start%> is declared with %<register%> storage");
9953 /* We want to verify the second parameter just once before the tree
9954 optimizers are run and then avoid keeping it in the tree,
9955 as otherwise we could warn even for correct code like:
9956 void foo (int i, ...)
9957 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9958 if (va_start_p)
9959 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9960 else
9961 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9963 return false;
9967 /* Expand a call EXP to __builtin_object_size. */
9969 static rtx
9970 expand_builtin_object_size (tree exp)
9972 tree ost;
9973 int object_size_type;
9974 tree fndecl = get_callee_fndecl (exp);
9976 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9978 error ("first argument of %qD must be a pointer, second integer constant",
9979 fndecl);
9980 expand_builtin_trap ();
9981 return const0_rtx;
9984 ost = CALL_EXPR_ARG (exp, 1);
9985 STRIP_NOPS (ost);
9987 if (TREE_CODE (ost) != INTEGER_CST
9988 || tree_int_cst_sgn (ost) < 0
9989 || compare_tree_int (ost, 3) > 0)
9991 error ("last argument of %qD is not integer constant between 0 and 3",
9992 fndecl);
9993 expand_builtin_trap ();
9994 return const0_rtx;
9997 object_size_type = tree_to_shwi (ost);
9999 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10002 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10003 FCODE is the BUILT_IN_* to use.
10004 Return NULL_RTX if we failed; the caller should emit a normal call,
10005 otherwise try to get the result in TARGET, if convenient (and in
10006 mode MODE if that's convenient). */
10008 static rtx
10009 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10010 enum built_in_function fcode)
10012 if (!validate_arglist (exp,
10013 POINTER_TYPE,
10014 fcode == BUILT_IN_MEMSET_CHK
10015 ? INTEGER_TYPE : POINTER_TYPE,
10016 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10017 return NULL_RTX;
10019 tree dest = CALL_EXPR_ARG (exp, 0);
10020 tree src = CALL_EXPR_ARG (exp, 1);
10021 tree len = CALL_EXPR_ARG (exp, 2);
10022 tree size = CALL_EXPR_ARG (exp, 3);
10024 /* FIXME: Set access mode to write only for memset et al. */
10025 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10026 /*srcstr=*/NULL_TREE, size, access_read_write);
10028 if (!tree_fits_uhwi_p (size))
10029 return NULL_RTX;
10031 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10033 /* Avoid transforming the checking call to an ordinary one when
10034 an overflow has been detected or when the call couldn't be
10035 validated because the size is not constant. */
10036 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10037 return NULL_RTX;
10039 tree fn = NULL_TREE;
10040 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10041 mem{cpy,pcpy,move,set} is available. */
10042 switch (fcode)
10044 case BUILT_IN_MEMCPY_CHK:
10045 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10046 break;
10047 case BUILT_IN_MEMPCPY_CHK:
10048 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10049 break;
10050 case BUILT_IN_MEMMOVE_CHK:
10051 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10052 break;
10053 case BUILT_IN_MEMSET_CHK:
10054 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10055 break;
10056 default:
10057 break;
10060 if (! fn)
10061 return NULL_RTX;
10063 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10064 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10065 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10066 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10068 else if (fcode == BUILT_IN_MEMSET_CHK)
10069 return NULL_RTX;
10070 else
10072 unsigned int dest_align = get_pointer_alignment (dest);
10074 /* If DEST is not a pointer type, call the normal function. */
10075 if (dest_align == 0)
10076 return NULL_RTX;
10078 /* If SRC and DEST are the same (and not volatile), do nothing. */
10079 if (operand_equal_p (src, dest, 0))
10081 tree expr;
10083 if (fcode != BUILT_IN_MEMPCPY_CHK)
10085 /* Evaluate and ignore LEN in case it has side-effects. */
10086 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10087 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10090 expr = fold_build_pointer_plus (dest, len);
10091 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10094 /* __memmove_chk special case. */
10095 if (fcode == BUILT_IN_MEMMOVE_CHK)
10097 unsigned int src_align = get_pointer_alignment (src);
10099 if (src_align == 0)
10100 return NULL_RTX;
10102 /* If src is categorized for a readonly section we can use
10103 normal __memcpy_chk. */
10104 if (readonly_data_expr (src))
10106 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10107 if (!fn)
10108 return NULL_RTX;
10109 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10110 dest, src, len, size);
10111 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10112 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10113 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10116 return NULL_RTX;
10120 /* Emit warning if a buffer overflow is detected at compile time. */
10122 static void
10123 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10125 /* The source string. */
10126 tree srcstr = NULL_TREE;
10127 /* The size of the destination object returned by __builtin_object_size. */
10128 tree objsize = NULL_TREE;
10129 /* The string that is being concatenated with (as in __strcat_chk)
10130 or null if it isn't. */
10131 tree catstr = NULL_TREE;
10132 /* The maximum length of the source sequence in a bounded operation
10133 (such as __strncat_chk) or null if the operation isn't bounded
10134 (such as __strcat_chk). */
10135 tree maxread = NULL_TREE;
10136 /* The exact size of the access (such as in __strncpy_chk). */
10137 tree size = NULL_TREE;
10138 /* The mode of the access checked by the function. Except for snprintf
10139 both writing and reading are checked. */
10140 access_mode mode = access_read_write;
10142 switch (fcode)
10144 case BUILT_IN_STRCPY_CHK:
10145 case BUILT_IN_STPCPY_CHK:
10146 srcstr = CALL_EXPR_ARG (exp, 1);
10147 objsize = CALL_EXPR_ARG (exp, 2);
10148 break;
10150 case BUILT_IN_STRCAT_CHK:
10151 /* For __strcat_chk the warning will be emitted only if overflowing
10152 by at least strlen (dest) + 1 bytes. */
10153 catstr = CALL_EXPR_ARG (exp, 0);
10154 srcstr = CALL_EXPR_ARG (exp, 1);
10155 objsize = CALL_EXPR_ARG (exp, 2);
10156 break;
10158 case BUILT_IN_STRNCAT_CHK:
10159 catstr = CALL_EXPR_ARG (exp, 0);
10160 srcstr = CALL_EXPR_ARG (exp, 1);
10161 maxread = CALL_EXPR_ARG (exp, 2);
10162 objsize = CALL_EXPR_ARG (exp, 3);
10163 break;
10165 case BUILT_IN_STRNCPY_CHK:
10166 case BUILT_IN_STPNCPY_CHK:
10167 srcstr = CALL_EXPR_ARG (exp, 1);
10168 size = CALL_EXPR_ARG (exp, 2);
10169 objsize = CALL_EXPR_ARG (exp, 3);
10170 break;
10172 case BUILT_IN_SNPRINTF_CHK:
10173 case BUILT_IN_VSNPRINTF_CHK:
10174 maxread = CALL_EXPR_ARG (exp, 1);
10175 objsize = CALL_EXPR_ARG (exp, 3);
10176 /* The only checked access is the write to the destination. */
10177 mode = access_write_only;
10178 break;
10179 default:
10180 gcc_unreachable ();
10183 if (catstr && maxread)
10185 /* Check __strncat_chk. There is no way to determine the length
10186 of the string to which the source string is being appended so
10187 just warn when the length of the source string is not known. */
10188 check_strncat_sizes (exp, objsize);
10189 return;
10192 check_access (exp, size, maxread, srcstr, objsize, mode);
10195 /* Emit warning if a buffer overflow is detected at compile time
10196 in __sprintf_chk/__vsprintf_chk calls. */
10198 static void
10199 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10201 tree size, len, fmt;
10202 const char *fmt_str;
10203 int nargs = call_expr_nargs (exp);
10205 /* Verify the required arguments in the original call. */
10207 if (nargs < 4)
10208 return;
10209 size = CALL_EXPR_ARG (exp, 2);
10210 fmt = CALL_EXPR_ARG (exp, 3);
10212 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10213 return;
10215 /* Check whether the format is a literal string constant. */
10216 fmt_str = c_getstr (fmt);
10217 if (fmt_str == NULL)
10218 return;
10220 if (!init_target_chars ())
10221 return;
10223 /* If the format doesn't contain % args or %%, we know its size. */
10224 if (strchr (fmt_str, target_percent) == 0)
10225 len = build_int_cstu (size_type_node, strlen (fmt_str));
10226 /* If the format is "%s" and first ... argument is a string literal,
10227 we know it too. */
10228 else if (fcode == BUILT_IN_SPRINTF_CHK
10229 && strcmp (fmt_str, target_percent_s) == 0)
10231 tree arg;
10233 if (nargs < 5)
10234 return;
10235 arg = CALL_EXPR_ARG (exp, 4);
10236 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10237 return;
10239 len = c_strlen (arg, 1);
10240 if (!len || ! tree_fits_uhwi_p (len))
10241 return;
10243 else
10244 return;
10246 /* Add one for the terminating nul. */
10247 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10249 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10250 access_write_only);
10253 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10254 if possible. */
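/* Illustrative sketch (assumed example): given

     char buf[64];

   __builtin_object_size (&buf[8], 0) folds to 56 here, while for a
   pointer whose target is unknown the call is left alone and the
   expander above falls back to (size_t) -1 for types 0 and 1 and
   (size_t) 0 for types 2 and 3. */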
10256 static tree
10257 fold_builtin_object_size (tree ptr, tree ost)
10259 unsigned HOST_WIDE_INT bytes;
10260 int object_size_type;
10262 if (!validate_arg (ptr, POINTER_TYPE)
10263 || !validate_arg (ost, INTEGER_TYPE))
10264 return NULL_TREE;
10266 STRIP_NOPS (ost);
10268 if (TREE_CODE (ost) != INTEGER_CST
10269 || tree_int_cst_sgn (ost) < 0
10270 || compare_tree_int (ost, 3) > 0)
10271 return NULL_TREE;
10273 object_size_type = tree_to_shwi (ost);
10275 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10276 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10277 and (size_t) 0 for types 2 and 3. */
10278 if (TREE_SIDE_EFFECTS (ptr))
10279 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10281 if (TREE_CODE (ptr) == ADDR_EXPR)
10283 compute_builtin_object_size (ptr, object_size_type, &bytes);
10284 if (wi::fits_to_tree_p (bytes, size_type_node))
10285 return build_int_cstu (size_type_node, bytes);
10287 else if (TREE_CODE (ptr) == SSA_NAME)
10289 /* If object size is not known yet, delay folding until
10290 later. Maybe subsequent passes will help determining
10291 it. */
10292 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10293 && wi::fits_to_tree_p (bytes, size_type_node))
10294 return build_int_cstu (size_type_node, bytes);
10297 return NULL_TREE;
10300 /* Builtins with folding operations that operate on "..." arguments
10301 need special handling; we need to store the arguments in a convenient
10302 data structure before attempting any folding. Fortunately there are
10303 only a few builtins that fall into this category. FNDECL is the
10304 function, EXP is the CALL_EXPR for the call. */
10306 static tree
10307 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10309 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10310 tree ret = NULL_TREE;
10312 switch (fcode)
10314 case BUILT_IN_FPCLASSIFY:
10315 ret = fold_builtin_fpclassify (loc, args, nargs);
10316 break;
10318 default:
10319 break;
10321 if (ret)
10323 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10324 SET_EXPR_LOCATION (ret, loc);
10325 suppress_warning (ret);
10326 return ret;
10328 return NULL_TREE;
10331 /* Initialize format string characters in the target charset. */
10333 bool
10334 init_target_chars (void)
10336 static bool init;
10337 if (!init)
10339 target_newline = lang_hooks.to_target_charset ('\n');
10340 target_percent = lang_hooks.to_target_charset ('%');
10341 target_c = lang_hooks.to_target_charset ('c');
10342 target_s = lang_hooks.to_target_charset ('s');
10343 if (target_newline == 0 || target_percent == 0 || target_c == 0
10344 || target_s == 0)
10345 return false;
10347 target_percent_c[0] = target_percent;
10348 target_percent_c[1] = target_c;
10349 target_percent_c[2] = '\0';
10351 target_percent_s[0] = target_percent;
10352 target_percent_s[1] = target_s;
10353 target_percent_s[2] = '\0';
10355 target_percent_s_newline[0] = target_percent;
10356 target_percent_s_newline[1] = target_s;
10357 target_percent_s_newline[2] = target_newline;
10358 target_percent_s_newline[3] = '\0';
10360 init = true;
10362 return true;
10365 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10366 and no overflow/underflow occurred. INEXACT is true if M was not
10367 exactly calculated. TYPE is the tree type for the result. This
10368 function assumes that you cleared the MPFR flags and then
10369 calculated M to see if anything subsequently set a flag prior to
10370 entering this function. Return NULL_TREE if any checks fail. */
10372 static tree
10373 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10375 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10376 overflow/underflow occurred. If -frounding-math, proceed iff the
10377 result of calling FUNC was exact. */
10378 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10379 && (!flag_rounding_math || !inexact))
10381 REAL_VALUE_TYPE rr;
10383 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10384 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10385 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10386 but the mpfr_t is not, then we underflowed in the
10387 conversion. */
10388 if (real_isfinite (&rr)
10389 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10391 REAL_VALUE_TYPE rmode;
10393 real_convert (&rmode, TYPE_MODE (type), &rr);
10394 /* Proceed iff the specified mode can hold the value. */
10395 if (real_identical (&rmode, &rr))
10396 return build_real (type, rmode);
10399 return NULL_TREE;
10402 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10403 number and no overflow/underflow occurred. INEXACT is true if M
10404 was not exactly calculated. TYPE is the tree type for the result.
10405 This function assumes that you cleared the MPFR flags and then
10406 calculated M to see if anything subsequently set a flag prior to
10407 entering this function. Return NULL_TREE if any checks fail, if
10408 FORCE_CONVERT is true, then bypass the checks. */
10410 static tree
10411 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10413 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10414 overflow/underflow occurred. If -frounding-math, proceed iff the
10415 result of calling FUNC was exact. */
10416 if (force_convert
10417 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10418 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10419 && (!flag_rounding_math || !inexact)))
10421 REAL_VALUE_TYPE re, im;
10423 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10424 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10425 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10426 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10427 but the mpfr_t is not, then we underflowed in the
10428 conversion. */
10429 if (force_convert
10430 || (real_isfinite (&re) && real_isfinite (&im)
10431 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10432 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10434 REAL_VALUE_TYPE re_mode, im_mode;
10436 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10437 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10438 /* Proceed iff the specified mode can hold the value. */
10439 if (force_convert
10440 || (real_identical (&re_mode, &re)
10441 && real_identical (&im_mode, &im)))
10442 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10443 build_real (TREE_TYPE (type), im_mode));
10446 return NULL_TREE;
10449 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10450 the value pointed to by ARG_QUO and return the result. The type is taken
10451 from the type of ARG0 and is used for setting the precision of the
10452 calculation and results. */
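/* Illustrative sketch (assumed example): with constant arguments

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   the helper below folds the call into a COMPOUND_EXPR that stores 2
   through the quo pointer and yields the remainder -1.0, so the store
   keeps its side effect. */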
10454 static tree
10455 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10457 tree const type = TREE_TYPE (arg0);
10458 tree result = NULL_TREE;
10460 STRIP_NOPS (arg0);
10461 STRIP_NOPS (arg1);
10463 /* To proceed, MPFR must exactly represent the target floating point
10464 format, which only happens when the target base equals two. */
10465 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10466 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10467 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10469 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10470 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10472 if (real_isfinite (ra0) && real_isfinite (ra1))
10474 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10475 const int prec = fmt->p;
10476 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10477 tree result_rem;
10478 long integer_quo;
10479 mpfr_t m0, m1;
10481 mpfr_inits2 (prec, m0, m1, NULL);
10482 mpfr_from_real (m0, ra0, MPFR_RNDN);
10483 mpfr_from_real (m1, ra1, MPFR_RNDN);
10484 mpfr_clear_flags ();
10485 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10486 /* Remquo is independent of the rounding mode, so pass
10487 inexact=0 to do_mpfr_ckconv(). */
10488 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10489 mpfr_clears (m0, m1, NULL);
10490 if (result_rem)
10492 /* MPFR calculates quo in the host's long so it may
10493 return more bits in quo than the target int can hold
10494 if sizeof(host long) > sizeof(target int). This can
10495 happen even for native compilers in LP64 mode. In
10496 these cases, reduce the quo value modulo the largest
10497 number that the target int can hold while leaving one
10498 bit for the sign. */
10499 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10500 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10502 /* Dereference the quo pointer argument. */
10503 arg_quo = build_fold_indirect_ref (arg_quo);
10504 /* Proceed iff a valid pointer type was passed in. */
10505 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10507 /* Set the value. */
10508 tree result_quo
10509 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10510 build_int_cst (TREE_TYPE (arg_quo),
10511 integer_quo));
10512 TREE_SIDE_EFFECTS (result_quo) = 1;
10513 /* Combine the quo assignment with the rem. */
10514 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10515 result_quo, result_rem));
10520 return result;
10523 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10524 resulting value as a tree with type TYPE. The mpfr precision is
10525 set to the precision of TYPE. We assume that this mpfr function
10526 returns zero if the result could be calculated exactly within the
10527 requested precision. In addition, the integer pointer represented
10528 by ARG_SG will be dereferenced and set to the appropriate signgam
10529 (-1,1) value. */
10531 static tree
10532 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10534 tree result = NULL_TREE;
10536 STRIP_NOPS (arg);
10538 /* To proceed, MPFR must exactly represent the target floating point
10539 format, which only happens when the target base equals two. Also
10540 verify ARG is a constant and that ARG_SG is an int pointer. */
10541 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10542 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10543 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10544 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10546 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10548 /* In addition to NaN and Inf, the argument cannot be zero or a
10549 negative integer. */
10550 if (real_isfinite (ra)
10551 && ra->cl != rvc_zero
10552 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10554 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10555 const int prec = fmt->p;
10556 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10557 int inexact, sg;
10558 mpfr_t m;
10559 tree result_lg;
10561 mpfr_init2 (m, prec);
10562 mpfr_from_real (m, ra, MPFR_RNDN);
10563 mpfr_clear_flags ();
10564 inexact = mpfr_lgamma (m, &sg, m, rnd);
10565 result_lg = do_mpfr_ckconv (m, type, inexact);
10566 mpfr_clear (m);
10567 if (result_lg)
10569 tree result_sg;
10571 /* Dereference the arg_sg pointer argument. */
10572 arg_sg = build_fold_indirect_ref (arg_sg);
10573 /* Assign the signgam value into *arg_sg. */
10574 result_sg = fold_build2 (MODIFY_EXPR,
10575 TREE_TYPE (arg_sg), arg_sg,
10576 build_int_cst (TREE_TYPE (arg_sg), sg));
10577 TREE_SIDE_EFFECTS (result_sg) = 1;
10578 /* Combine the signgam assignment with the lgamma result. */
10579 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10580 result_sg, result_lg));
10585 return result;
10588 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10589 mpc function FUNC on it and return the resulting value as a tree
10590 with type TYPE. The mpfr precision is set to the precision of
10591 TYPE. We assume that function FUNC returns zero if the result
10592 could be calculated exactly within the requested precision. If
10593 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10594 in the arguments and/or results. */
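/* Illustrative sketch (assumed usage): the constant folder can pass
   e.g. mpc_pow as FUNC, so that a call such as

     cpow (2.0 + 0.0i, 3.0 + 0.0i)

   with both operands represented as COMPLEX_CSTs folds to the
   COMPLEX_CST 8.0 + 0.0i once do_mpc_ckconv accepts the result. */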
10596 tree
10597 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10598 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10600 tree result = NULL_TREE;
10602 STRIP_NOPS (arg0);
10603 STRIP_NOPS (arg1);
10605 /* To proceed, MPFR must exactly represent the target floating point
10606 format, which only happens when the target base equals two. */
10607 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10609 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10610 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10611 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10613 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10614 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10615 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10616 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10618 if (do_nonfinite
10619 || (real_isfinite (re0) && real_isfinite (im0)
10620 && real_isfinite (re1) && real_isfinite (im1)))
10622 const struct real_format *const fmt =
10623 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10624 const int prec = fmt->p;
10625 const mpfr_rnd_t rnd = fmt->round_towards_zero
10626 ? MPFR_RNDZ : MPFR_RNDN;
10627 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10628 int inexact;
10629 mpc_t m0, m1;
10631 mpc_init2 (m0, prec);
10632 mpc_init2 (m1, prec);
10633 mpfr_from_real (mpc_realref (m0), re0, rnd);
10634 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10635 mpfr_from_real (mpc_realref (m1), re1, rnd);
10636 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10637 mpfr_clear_flags ();
10638 inexact = func (m0, m0, m1, crnd);
10639 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10640 mpc_clear (m0);
10641 mpc_clear (m1);
10645 return result;
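/* Illustrative sketch, not part of builtins.c: the kind of MPC entry
   point typically passed as FUNC (mpc_pow, for instance), shown
   standalone with a fixed 53-bit precision.  Names are made up for
   illustration.  */
#include <stdio.h>
#include <mpc.h>

int
cpow_sketch (void)
{
  mpc_t m0, m1;
  int inexact;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, 0.0, 1.0, MPC_RNDNN);       /* m0 = i  */
  mpc_set_d_d (m1, 2.0, 0.0, MPC_RNDNN);       /* m1 = 2  */
  inexact = mpc_pow (m0, m0, m1, MPC_RNDNN);   /* m0 = i**2 = -1  */
  printf ("re = %.17g, im = %.17g, inexact = %d\n",
          mpfr_get_d (mpc_realref (m0), MPFR_RNDN),
          mpfr_get_d (mpc_imagref (m0), MPFR_RNDN), inexact);
  mpc_clear (m0);
  mpc_clear (m1);
  return 0;
}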
10648 /* A wrapper function for builtin folding that prevents warnings for
10649 "statement without effect" and the like, caused by removing the
10650 call node before the warning is generated. */
10652 tree
10653 fold_call_stmt (gcall *stmt, bool ignore)
10655 tree ret = NULL_TREE;
10656 tree fndecl = gimple_call_fndecl (stmt);
10657 location_t loc = gimple_location (stmt);
10658 if (fndecl && fndecl_built_in_p (fndecl)
10659 && !gimple_call_va_arg_pack_p (stmt))
10661 int nargs = gimple_call_num_args (stmt);
10662 tree *args = (nargs > 0
10663 ? gimple_call_arg_ptr (stmt, 0)
10664 : &error_mark_node);
10666 if (avoid_folding_inline_builtin (fndecl))
10667 return NULL_TREE;
10668 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10670 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10672 else
10674 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
10675 if (ret)
10677 /* Propagate location information from original call to
10678 expansion of builtin. Otherwise things like
10679 maybe_emit_chk_warning, that operate on the expansion
10680 of a builtin, will use the wrong location information. */
10681 if (gimple_has_location (stmt))
10683 tree realret = ret;
10684 if (TREE_CODE (ret) == NOP_EXPR)
10685 realret = TREE_OPERAND (ret, 0);
10686 if (CAN_HAVE_LOCATION_P (realret)
10687 && !EXPR_HAS_LOCATION (realret))
10688 SET_EXPR_LOCATION (realret, loc);
10689 return realret;
10691 return ret;
10695 return NULL_TREE;
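/* Illustrative sketch, not part of builtins.c: how a hypothetical caller
   holding a builtin call statement might use the wrapper above.  The
   helper name is made up; the real consumers live in the gimple folding
   machinery.  */
static tree
folded_builtin_call_sketch (gcall *stmt, bool lhs_is_ignored)
{
  /* NULL_TREE means the call could not be folded; otherwise the returned
     tree already carries STMT's location, as arranged above.  */
  return fold_call_stmt (stmt, lhs_is_ignored);
}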
10698 /* Look up the function in builtin_decl that corresponds to DECL
10699 and set ASMSPEC as its user assembler name. DECL must be a
10700 function decl that declares a builtin. */
10702 void
10703 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10705 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10706 && asmspec != 0);
10708 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10709 set_user_assembler_name (builtin, asmspec);
10711 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10712 && INT_TYPE_SIZE < BITS_PER_WORD)
10714 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10715 set_user_assembler_libfunc ("ffs", asmspec);
10716 set_optab_libfunc (ffs_optab, mode, "ffs");
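/* Illustrative sketch, not part of builtins.c: the source-level construct
   that leads here.  Giving a recognized builtin an asm label makes the
   front end call set_builtin_user_assembler_name, so calls that are not
   expanded inline (and, for ffs on targets where int is narrower than a
   word, the ffs optab libfunc) reference the new symbol.  "my_ffs" is a
   made-up name.  */
extern int ffs (int) __asm__ ("my_ffs");

int
first_set_bit (int x)
{
  return ffs (x);       /* out-of-line calls will use "my_ffs" */
}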
10720 /* Return true if DECL is a builtin that expands to a constant or similarly
10721 simple code. */
10722 bool
10723 is_simple_builtin (tree decl)
10725 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10726 switch (DECL_FUNCTION_CODE (decl))
10728 /* Builtins that expand to constants. */
10729 case BUILT_IN_CONSTANT_P:
10730 case BUILT_IN_EXPECT:
10731 case BUILT_IN_OBJECT_SIZE:
10732 case BUILT_IN_UNREACHABLE:
10733 /* Simple register moves or loads from stack. */
10734 case BUILT_IN_ASSUME_ALIGNED:
10735 case BUILT_IN_RETURN_ADDRESS:
10736 case BUILT_IN_EXTRACT_RETURN_ADDR:
10737 case BUILT_IN_FROB_RETURN_ADDR:
10738 case BUILT_IN_RETURN:
10739 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10740 case BUILT_IN_FRAME_ADDRESS:
10741 case BUILT_IN_VA_END:
10742 case BUILT_IN_STACK_SAVE:
10743 case BUILT_IN_STACK_RESTORE:
10744 /* Exception state returns or moves registers around. */
10745 case BUILT_IN_EH_FILTER:
10746 case BUILT_IN_EH_POINTER:
10747 case BUILT_IN_EH_COPY_VALUES:
10748 return true;
10750 default:
10751 return false;
10754 return false;
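/* Illustrative sketch, not part of builtins.c: typical uses of the
   "simple" builtins listed above; each folds to a constant or expands to
   a few register operations rather than a real call.  */
int
simple_builtin_uses (int x)
{
  /* Folds to a compile-time 0 or 1.  */
  if (__builtin_constant_p (x))
    return 1;
  /* Expands to a plain use of the condition plus branch-prediction
     metadata.  */
  return __builtin_expect (x != 0, 1);
}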
10757 /* Return true if DECL is a builtin that is not expensive, i.e., one that is
10758 most probably expanded inline into reasonably simple code. This is a
10759 superset of is_simple_builtin. */
10760 bool
10761 is_inexpensive_builtin (tree decl)
10763 if (!decl)
10764 return false;
10765 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10766 return true;
10767 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10768 switch (DECL_FUNCTION_CODE (decl))
10770 case BUILT_IN_ABS:
10771 CASE_BUILT_IN_ALLOCA:
10772 case BUILT_IN_BSWAP16:
10773 case BUILT_IN_BSWAP32:
10774 case BUILT_IN_BSWAP64:
10775 case BUILT_IN_BSWAP128:
10776 case BUILT_IN_CLZ:
10777 case BUILT_IN_CLZIMAX:
10778 case BUILT_IN_CLZL:
10779 case BUILT_IN_CLZLL:
10780 case BUILT_IN_CTZ:
10781 case BUILT_IN_CTZIMAX:
10782 case BUILT_IN_CTZL:
10783 case BUILT_IN_CTZLL:
10784 case BUILT_IN_FFS:
10785 case BUILT_IN_FFSIMAX:
10786 case BUILT_IN_FFSL:
10787 case BUILT_IN_FFSLL:
10788 case BUILT_IN_IMAXABS:
10789 case BUILT_IN_FINITE:
10790 case BUILT_IN_FINITEF:
10791 case BUILT_IN_FINITEL:
10792 case BUILT_IN_FINITED32:
10793 case BUILT_IN_FINITED64:
10794 case BUILT_IN_FINITED128:
10795 case BUILT_IN_FPCLASSIFY:
10796 case BUILT_IN_ISFINITE:
10797 case BUILT_IN_ISINF_SIGN:
10798 case BUILT_IN_ISINF:
10799 case BUILT_IN_ISINFF:
10800 case BUILT_IN_ISINFL:
10801 case BUILT_IN_ISINFD32:
10802 case BUILT_IN_ISINFD64:
10803 case BUILT_IN_ISINFD128:
10804 case BUILT_IN_ISNAN:
10805 case BUILT_IN_ISNANF:
10806 case BUILT_IN_ISNANL:
10807 case BUILT_IN_ISNAND32:
10808 case BUILT_IN_ISNAND64:
10809 case BUILT_IN_ISNAND128:
10810 case BUILT_IN_ISNORMAL:
10811 case BUILT_IN_ISGREATER:
10812 case BUILT_IN_ISGREATEREQUAL:
10813 case BUILT_IN_ISLESS:
10814 case BUILT_IN_ISLESSEQUAL:
10815 case BUILT_IN_ISLESSGREATER:
10816 case BUILT_IN_ISUNORDERED:
10817 case BUILT_IN_VA_ARG_PACK:
10818 case BUILT_IN_VA_ARG_PACK_LEN:
10819 case BUILT_IN_VA_COPY:
10820 case BUILT_IN_TRAP:
10821 case BUILT_IN_SAVEREGS:
10822 case BUILT_IN_POPCOUNTL:
10823 case BUILT_IN_POPCOUNTLL:
10824 case BUILT_IN_POPCOUNTIMAX:
10825 case BUILT_IN_POPCOUNT:
10826 case BUILT_IN_PARITYL:
10827 case BUILT_IN_PARITYLL:
10828 case BUILT_IN_PARITYIMAX:
10829 case BUILT_IN_PARITY:
10830 case BUILT_IN_LABS:
10831 case BUILT_IN_LLABS:
10832 case BUILT_IN_PREFETCH:
10833 case BUILT_IN_ACC_ON_DEVICE:
10834 return true;
10836 default:
10837 return is_simple_builtin (decl);
10840 return false;
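/* Illustrative sketch, not part of builtins.c: the builtins above are
   cheap enough to leave in code considered for inlining; popcount and
   clz, for instance, usually become one or two instructions or a small
   libgcc call.  The example assumes a 32-bit unsigned int.  */
unsigned int
bit_stats (unsigned int x)
{
  /* Guard the clz call: __builtin_clz (0) is undefined.  */
  return __builtin_popcount (x) + (x ? __builtin_clz (x) : 32);
}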
10843 /* Return true if T is a constant and the value cast to a target char
10844 can be represented by a host char.
10845 Store the cast char constant in *P if so. */
10847 bool
10848 target_char_cst_p (tree t, char *p)
10850 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10851 return false;
10853 *p = (char)tree_to_uhwi (t);
10854 return true;
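/* Illustrative sketch, not part of builtins.c: how a folder might use the
   predicate above when simplifying a call such as strchr (s, c) with a
   constant character.  The helper name and the elided body are made up
   for illustration.  */
static tree
fold_strchr_sketch (tree str, tree chr)
{
  char c;
  if (!target_char_cst_p (chr, &c))
    return NULL_TREE;   /* Not a constant usable as a host char.  */
  /* ... C now holds the character and can be compared against the
     host-side copy of STR ...  */
  return NULL_TREE;
}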
10857 /* Return true if the builtin DECL is implemented in a standard library.
10858 Otherwise return false, which does not guarantee that it is not (thus the
10859 list of handled builtins below may be incomplete). */
10861 bool
10862 builtin_with_linkage_p (tree decl)
10864 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10865 switch (DECL_FUNCTION_CODE (decl))
10867 CASE_FLT_FN (BUILT_IN_ACOS):
10868 CASE_FLT_FN (BUILT_IN_ACOSH):
10869 CASE_FLT_FN (BUILT_IN_ASIN):
10870 CASE_FLT_FN (BUILT_IN_ASINH):
10871 CASE_FLT_FN (BUILT_IN_ATAN):
10872 CASE_FLT_FN (BUILT_IN_ATANH):
10873 CASE_FLT_FN (BUILT_IN_ATAN2):
10874 CASE_FLT_FN (BUILT_IN_CBRT):
10875 CASE_FLT_FN (BUILT_IN_CEIL):
10876 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
10877 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10878 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
10879 CASE_FLT_FN (BUILT_IN_COS):
10880 CASE_FLT_FN (BUILT_IN_COSH):
10881 CASE_FLT_FN (BUILT_IN_ERF):
10882 CASE_FLT_FN (BUILT_IN_ERFC):
10883 CASE_FLT_FN (BUILT_IN_EXP):
10884 CASE_FLT_FN (BUILT_IN_EXP2):
10885 CASE_FLT_FN (BUILT_IN_EXPM1):
10886 CASE_FLT_FN (BUILT_IN_FABS):
10887 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10888 CASE_FLT_FN (BUILT_IN_FDIM):
10889 CASE_FLT_FN (BUILT_IN_FLOOR):
10890 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
10891 CASE_FLT_FN (BUILT_IN_FMA):
10892 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
10893 CASE_FLT_FN (BUILT_IN_FMAX):
10894 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
10895 CASE_FLT_FN (BUILT_IN_FMIN):
10896 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
10897 CASE_FLT_FN (BUILT_IN_FMOD):
10898 CASE_FLT_FN (BUILT_IN_FREXP):
10899 CASE_FLT_FN (BUILT_IN_HYPOT):
10900 CASE_FLT_FN (BUILT_IN_ILOGB):
10901 CASE_FLT_FN (BUILT_IN_LDEXP):
10902 CASE_FLT_FN (BUILT_IN_LGAMMA):
10903 CASE_FLT_FN (BUILT_IN_LLRINT):
10904 CASE_FLT_FN (BUILT_IN_LLROUND):
10905 CASE_FLT_FN (BUILT_IN_LOG):
10906 CASE_FLT_FN (BUILT_IN_LOG10):
10907 CASE_FLT_FN (BUILT_IN_LOG1P):
10908 CASE_FLT_FN (BUILT_IN_LOG2):
10909 CASE_FLT_FN (BUILT_IN_LOGB):
10910 CASE_FLT_FN (BUILT_IN_LRINT):
10911 CASE_FLT_FN (BUILT_IN_LROUND):
10912 CASE_FLT_FN (BUILT_IN_MODF):
10913 CASE_FLT_FN (BUILT_IN_NAN):
10914 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10915 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
10916 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
10917 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
10918 CASE_FLT_FN (BUILT_IN_POW):
10919 CASE_FLT_FN (BUILT_IN_REMAINDER):
10920 CASE_FLT_FN (BUILT_IN_REMQUO):
10921 CASE_FLT_FN (BUILT_IN_RINT):
10922 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
10923 CASE_FLT_FN (BUILT_IN_ROUND):
10924 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
10925 CASE_FLT_FN (BUILT_IN_SCALBLN):
10926 CASE_FLT_FN (BUILT_IN_SCALBN):
10927 CASE_FLT_FN (BUILT_IN_SIN):
10928 CASE_FLT_FN (BUILT_IN_SINH):
10929 CASE_FLT_FN (BUILT_IN_SINCOS):
10930 CASE_FLT_FN (BUILT_IN_SQRT):
10931 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
10932 CASE_FLT_FN (BUILT_IN_TAN):
10933 CASE_FLT_FN (BUILT_IN_TANH):
10934 CASE_FLT_FN (BUILT_IN_TGAMMA):
10935 CASE_FLT_FN (BUILT_IN_TRUNC):
10936 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
10937 return true;
10939 case BUILT_IN_STPCPY:
10940 case BUILT_IN_STPNCPY:
10941 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
10942 by libiberty's stpcpy.c for MinGW targets, so we need to return true
10943 in order to be able to build libiberty in LTO mode for them. */
10944 return true;
10946 default:
10947 break;
10949 return false;
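/* Illustrative sketch, not part of builtins.c: the math builtins listed
   above have library fallbacks, so a call may end up as a real symbol
   reference that the linker must resolve, while builtins such as
   __builtin_unreachable never do.  */
#include <math.h>

double
maybe_calls_libm (double x)
{
  if (x < 0.0)
    __builtin_unreachable ();   /* never becomes a symbol reference */
  return sqrt (x);              /* may expand inline or call libm's sqrt */
}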
10952 /* Return true if OFFRNG is bounded to a subrange of offset values
10953 valid for the largest possible object. */
10955 bool
10956 access_ref::offset_bounded () const
10958 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
10959 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
10960 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
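/* Illustrative sketch, not part of builtins.c: the same test written
   against host integers.  offset_bounded asks whether the whole range
   [offrng[0], offrng[1]] fits within the offsets valid for the largest
   possible object, i.e. within ptrdiff_t.  The helper is made up for
   illustration.  */
#include <stdbool.h>
#include <stdint.h>

static bool
offset_bounded_sketch (intmax_t lo, intmax_t hi)
{
  return PTRDIFF_MIN <= lo && hi <= PTRDIFF_MAX;
}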
10963 /* Return the fnspec string describing the known side effects of the
10964 builtin CALLEE, or the empty spec if they are unknown.  See
10965 tree-ssa-structalias.c:find_func_aliases for the builtins handled here. */
10967 attr_fnspec
10968 builtin_fnspec (tree callee)
10970 built_in_function code = DECL_FUNCTION_CODE (callee);
10972 switch (code)
10974 /* All the following functions read memory pointed to by
10975 their second argument and write memory pointed to by first
10976 argument.
10977 strcat/strncat additionally reads memory pointed to by the first
10978 argument. */
10979 case BUILT_IN_STRCAT:
10980 case BUILT_IN_STRCAT_CHK:
10981 return "1cW 1 ";
10982 case BUILT_IN_STRNCAT:
10983 case BUILT_IN_STRNCAT_CHK:
10984 return "1cW 13";
10985 case BUILT_IN_STRCPY:
10986 case BUILT_IN_STRCPY_CHK:
10987 return "1cO 1 ";
10988 case BUILT_IN_STPCPY:
10989 case BUILT_IN_STPCPY_CHK:
10990 return ".cO 1 ";
10991 case BUILT_IN_STRNCPY:
10992 case BUILT_IN_MEMCPY:
10993 case BUILT_IN_MEMMOVE:
10994 case BUILT_IN_TM_MEMCPY:
10995 case BUILT_IN_TM_MEMMOVE:
10996 case BUILT_IN_STRNCPY_CHK:
10997 case BUILT_IN_MEMCPY_CHK:
10998 case BUILT_IN_MEMMOVE_CHK:
10999 return "1cO313";
11000 case BUILT_IN_MEMPCPY:
11001 case BUILT_IN_MEMPCPY_CHK:
11002 return ".cO313";
11003 case BUILT_IN_STPNCPY:
11004 case BUILT_IN_STPNCPY_CHK:
11005 return ".cO313";
11006 case BUILT_IN_BCOPY:
11007 return ".c23O3";
11008 case BUILT_IN_BZERO:
11009 return ".cO2";
11010 case BUILT_IN_MEMCMP:
11011 case BUILT_IN_MEMCMP_EQ:
11012 case BUILT_IN_BCMP:
11013 case BUILT_IN_STRNCMP:
11014 case BUILT_IN_STRNCMP_EQ:
11015 case BUILT_IN_STRNCASECMP:
11016 return ".cR3R3";
11018 /* The following functions read memory pointed to by their
11019 first argument. */
11020 CASE_BUILT_IN_TM_LOAD (1):
11021 CASE_BUILT_IN_TM_LOAD (2):
11022 CASE_BUILT_IN_TM_LOAD (4):
11023 CASE_BUILT_IN_TM_LOAD (8):
11024 CASE_BUILT_IN_TM_LOAD (FLOAT):
11025 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11026 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11027 CASE_BUILT_IN_TM_LOAD (M64):
11028 CASE_BUILT_IN_TM_LOAD (M128):
11029 CASE_BUILT_IN_TM_LOAD (M256):
11030 case BUILT_IN_TM_LOG:
11031 case BUILT_IN_TM_LOG_1:
11032 case BUILT_IN_TM_LOG_2:
11033 case BUILT_IN_TM_LOG_4:
11034 case BUILT_IN_TM_LOG_8:
11035 case BUILT_IN_TM_LOG_FLOAT:
11036 case BUILT_IN_TM_LOG_DOUBLE:
11037 case BUILT_IN_TM_LOG_LDOUBLE:
11038 case BUILT_IN_TM_LOG_M64:
11039 case BUILT_IN_TM_LOG_M128:
11040 case BUILT_IN_TM_LOG_M256:
11041 return ".cR ";
11043 case BUILT_IN_INDEX:
11044 case BUILT_IN_RINDEX:
11045 case BUILT_IN_STRCHR:
11046 case BUILT_IN_STRLEN:
11047 case BUILT_IN_STRRCHR:
11048 return ".cR ";
11049 case BUILT_IN_STRNLEN:
11050 return ".cR2";
11052 /* These read memory pointed to by the first argument.
11053 Allocating memory does not have any side-effects apart from
11054 being the definition point for the pointer.
11055 Unix98 specifies that errno is set on allocation failure. */
11056 case BUILT_IN_STRDUP:
11057 return "mCR ";
11058 case BUILT_IN_STRNDUP:
11059 return "mCR2";
11060 /* Allocating memory does not have any side-effects apart from
11061 being the definition point for the pointer. */
11062 case BUILT_IN_MALLOC:
11063 case BUILT_IN_ALIGNED_ALLOC:
11064 case BUILT_IN_CALLOC:
11065 case BUILT_IN_GOMP_ALLOC:
11066 return "mC";
11067 CASE_BUILT_IN_ALLOCA:
11068 return "mc";
11069 /* These read memory pointed to by the first argument with size
11070 in the third argument. */
11071 case BUILT_IN_MEMCHR:
11072 return ".cR3";
11073 /* These read memory pointed to by the first and second arguments. */
11074 case BUILT_IN_STRSTR:
11075 case BUILT_IN_STRPBRK:
11076 case BUILT_IN_STRCASECMP:
11077 case BUILT_IN_STRCSPN:
11078 case BUILT_IN_STRSPN:
11079 case BUILT_IN_STRCMP:
11080 case BUILT_IN_STRCMP_EQ:
11081 return ".cR R ";
11082 /* Freeing memory kills the pointed-to memory. More importantly
11083 the call has to serve as a barrier for moving loads and stores
11084 across it. */
11085 case BUILT_IN_STACK_RESTORE:
11086 case BUILT_IN_FREE:
11087 case BUILT_IN_GOMP_FREE:
11088 return ".co ";
11089 case BUILT_IN_VA_END:
11090 return ".cO ";
11091 /* Realloc serves both as allocation point and deallocation point. */
11092 case BUILT_IN_REALLOC:
11093 return ".Cw ";
11094 case BUILT_IN_GAMMA_R:
11095 case BUILT_IN_GAMMAF_R:
11096 case BUILT_IN_GAMMAL_R:
11097 case BUILT_IN_LGAMMA_R:
11098 case BUILT_IN_LGAMMAF_R:
11099 case BUILT_IN_LGAMMAL_R:
11100 return ".C. Ot";
11101 case BUILT_IN_FREXP:
11102 case BUILT_IN_FREXPF:
11103 case BUILT_IN_FREXPL:
11104 case BUILT_IN_MODF:
11105 case BUILT_IN_MODFF:
11106 case BUILT_IN_MODFL:
11107 return ".c. Ot";
11108 case BUILT_IN_REMQUO:
11109 case BUILT_IN_REMQUOF:
11110 case BUILT_IN_REMQUOL:
11111 return ".c. . Ot";
11112 case BUILT_IN_SINCOS:
11113 case BUILT_IN_SINCOSF:
11114 case BUILT_IN_SINCOSL:
11115 return ".c. OtOt";
11116 case BUILT_IN_MEMSET:
11117 case BUILT_IN_MEMSET_CHK:
11118 case BUILT_IN_TM_MEMSET:
11119 return "1cO3";
11120 CASE_BUILT_IN_TM_STORE (1):
11121 CASE_BUILT_IN_TM_STORE (2):
11122 CASE_BUILT_IN_TM_STORE (4):
11123 CASE_BUILT_IN_TM_STORE (8):
11124 CASE_BUILT_IN_TM_STORE (FLOAT):
11125 CASE_BUILT_IN_TM_STORE (DOUBLE):
11126 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11127 CASE_BUILT_IN_TM_STORE (M64):
11128 CASE_BUILT_IN_TM_STORE (M128):
11129 CASE_BUILT_IN_TM_STORE (M256):
11130 return ".cO ";
11131 case BUILT_IN_STACK_SAVE:
11132 return ".c";
11133 case BUILT_IN_ASSUME_ALIGNED:
11134 return "1cX ";
11135 /* But posix_memalign stores a pointer into the memory pointed to
11136 by its first argument. */
11137 case BUILT_IN_POSIX_MEMALIGN:
11138 return ".cOt";
11140 default:
11141 return "";
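/* Illustrative sketch, not part of builtins.c: a rough reading of the
   strings returned above (attr-fnspec.h has the authoritative
   description).  Character 0 describes the return value ('1'-'4': the
   function returns that argument, 'm': it returns newly allocated
   memory, '.': nothing known); character 1 says whether the function is
   otherwise const/pure ('c'/'p'), with the uppercase form also meaning
   errno may be clobbered; each argument then gets two characters of its
   own.  The decoder below only looks at the first two characters and is
   made up for illustration.  */
#include <stdbool.h>

struct fnspec_head
{
  int returned_arg;             /* 1-based argument number, or 0.  */
  bool returns_new_memory;
  bool const_or_pure;
  bool may_clobber_errno;
};

static struct fnspec_head
fnspec_head_sketch (const char *spec)
{
  struct fnspec_head h = { 0, false, false, false };
  if (spec[0] >= '1' && spec[0] <= '4')
    h.returned_arg = spec[0] - '0';
  else if (spec[0] == 'm')
    h.returns_new_memory = true;
  h.const_or_pure = (spec[1] == 'c' || spec[1] == 'C'
                     || spec[1] == 'p' || spec[1] == 'P');
  h.may_clobber_errno = (spec[1] == 'C' || spec[1] == 'P');
  return h;
}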