gcc/builtins.cc
1 /* Expand builtin functions.
2 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "gomp-constants.h"
75 #include "omp-general.h"
76 #include "tree-dfa.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
96 {
97 #include "builtins.def"
98 };
100 /* Setup an array of builtin_info_type, make sure each element decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125 static rtx expand_builtin_cexpi (tree, rtx);
126 static rtx expand_builtin_issignaling (tree, rtx);
127 static rtx expand_builtin_int_roundingfn (tree, rtx);
128 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129 static rtx expand_builtin_next_arg (void);
130 static rtx expand_builtin_va_start (tree);
131 static rtx expand_builtin_va_end (tree);
132 static rtx expand_builtin_va_copy (tree);
133 static rtx inline_expand_builtin_bytecmp (tree, rtx);
134 static rtx expand_builtin_strcmp (tree, rtx);
135 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136 static rtx expand_builtin_memcpy (tree, rtx);
137 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 rtx target, tree exp,
139 memop_ret retmode,
140 bool might_overlap);
141 static rtx expand_builtin_memmove (tree, rtx);
142 static rtx expand_builtin_mempcpy (tree, rtx);
143 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144 static rtx expand_builtin_strcpy (tree, rtx);
145 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147 static rtx expand_builtin_strncpy (tree, rtx);
148 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 static rtx expand_builtin_bzero (tree);
150 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 static rtx expand_builtin_alloca (tree);
153 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 static rtx expand_builtin_frame_address (tree, tree);
155 static tree stabilize_va_list_loc (location_t, tree, int);
156 static rtx expand_builtin_expect (tree, rtx);
157 static rtx expand_builtin_expect_with_probability (tree, rtx);
158 static tree fold_builtin_constant_p (tree);
159 static tree fold_builtin_classify_type (tree);
160 static tree fold_builtin_strlen (location_t, tree, tree, tree);
161 static tree fold_builtin_inf (location_t, tree, int);
162 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
163 static bool validate_arg (const_tree, enum tree_code code);
164 static rtx expand_builtin_fabs (tree, rtx, rtx);
165 static rtx expand_builtin_signbit (tree, rtx);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_varargs (location_t, tree, tree*, int);
176 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
177 static tree fold_builtin_strspn (location_t, tree, tree, tree);
178 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
180 static rtx expand_builtin_object_size (tree);
181 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
182 enum built_in_function);
183 static void maybe_emit_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
185 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
187 unsigned HOST_WIDE_INT target_newline;
188 unsigned HOST_WIDE_INT target_percent;
189 static unsigned HOST_WIDE_INT target_c;
190 static unsigned HOST_WIDE_INT target_s;
191 char target_percent_c[3];
192 char target_percent_s[3];
193 char target_percent_s_newline[4];
194 static tree do_mpfr_remquo (tree, tree, tree);
195 static tree do_mpfr_lgamma_r (tree, tree, tree);
196 static void expand_builtin_sync_synchronize (void);
198 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */
200 static bool
201 is_builtin_name (const char *name)
203 return (startswith (name, "__builtin_")
204 || startswith (name, "__sync_")
205 || startswith (name, "__atomic_"));
208 /* Return true if NODE should be considered for inline expansion regardless
209 of the optimization level. This means whenever a function is invoked with
210 its "internal" name, which normally contains the prefix "__builtin". */
212 bool
213 called_as_built_in (tree node)
215 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
216 we want the name used to call the function, not the name it
217 will have. */
218 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
219 return is_builtin_name (name);
222 /* Compute values M and N such that M divides (address of EXP - N) and such
223 that N < M. If these numbers can be determined, store M in alignp and N in
224 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
225 *alignp and any bit-offset to *bitposp.
227 Note that the address (and thus the alignment) computed here is based
228 on the address to which a symbol resolves, whereas DECL_ALIGN is based
229 on the address at which an object is actually located. These two
230 addresses are not always the same. For example, on ARM targets,
231 the address &foo of a Thumb function foo() has the lowest bit set,
232 whereas foo() itself starts on an even address.
234 If ADDR_P is true we are taking the address of the memory reference EXP
235 and thus cannot rely on the access taking place. */
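/* Illustrative sketch of the contract above (not part of the original
   source): for an access like ((char *) p)[3], where P is known to be
   16-byte aligned, this would compute *ALIGNP = 128 (bits) and
   *BITPOSP = 24, i.e. M = 16 bytes divides the address minus N = 3
   bytes.  The numbers assume BITS_PER_UNIT == 8.  */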
237 bool
238 get_object_alignment_2 (tree exp, unsigned int *alignp,
239 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 poly_int64 bitsize, bitpos;
242 tree offset;
243 machine_mode mode;
244 int unsignedp, reversep, volatilep;
245 unsigned int align = BITS_PER_UNIT;
246 bool known_alignment = false;
248 /* Get the innermost object and the constant (bitpos) and possibly
249 variable (offset) offset of the access. */
250 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
251 &unsignedp, &reversep, &volatilep);
253 /* Extract alignment information from the innermost object and
254 possibly adjust bitpos and offset. */
255 if (TREE_CODE (exp) == FUNCTION_DECL)
257 /* Function addresses can encode extra information besides their
258 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
259 allows the low bit to be used as a virtual bit, we know
260 that the address itself must be at least 2-byte aligned. */
261 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
262 align = 2 * BITS_PER_UNIT;
264 else if (TREE_CODE (exp) == LABEL_DECL)
266 else if (TREE_CODE (exp) == CONST_DECL)
268 /* The alignment of a CONST_DECL is determined by its initializer. */
269 exp = DECL_INITIAL (exp);
270 align = TYPE_ALIGN (TREE_TYPE (exp));
271 if (CONSTANT_CLASS_P (exp))
272 align = targetm.constant_alignment (exp, align);
274 known_alignment = true;
276 else if (DECL_P (exp))
278 align = DECL_ALIGN (exp);
279 known_alignment = true;
281 else if (TREE_CODE (exp) == INDIRECT_REF
282 || TREE_CODE (exp) == MEM_REF
283 || TREE_CODE (exp) == TARGET_MEM_REF)
285 tree addr = TREE_OPERAND (exp, 0);
286 unsigned ptr_align;
287 unsigned HOST_WIDE_INT ptr_bitpos;
288 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290 /* If the address is explicitly aligned, handle that. */
291 if (TREE_CODE (addr) == BIT_AND_EXPR
292 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
295 ptr_bitmask *= BITS_PER_UNIT;
296 align = least_bit_hwi (ptr_bitmask);
297 addr = TREE_OPERAND (addr, 0);
300 known_alignment
301 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
302 align = MAX (ptr_align, align);
304 /* Re-apply explicit alignment to the bitpos. */
305 ptr_bitpos &= ptr_bitmask;
307 /* The alignment of the pointer operand in a TARGET_MEM_REF
308 has to take the variable offset parts into account. */
309 if (TREE_CODE (exp) == TARGET_MEM_REF)
311 if (TMR_INDEX (exp))
313 unsigned HOST_WIDE_INT step = 1;
314 if (TMR_STEP (exp))
315 step = TREE_INT_CST_LOW (TMR_STEP (exp));
316 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 if (TMR_INDEX2 (exp))
319 align = BITS_PER_UNIT;
320 known_alignment = false;
323 /* When EXP is an actual memory reference then we can use
324 TYPE_ALIGN of a pointer indirection to derive alignment.
325 Do so only if get_pointer_alignment_1 did not reveal absolute
326 alignment knowledge and if using that alignment would
327 improve the situation. */
328 unsigned int talign;
329 if (!addr_p && !known_alignment
330 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
331 && talign > align)
332 align = talign;
333 else
335 /* Else adjust bitpos accordingly. */
336 bitpos += ptr_bitpos;
337 if (TREE_CODE (exp) == MEM_REF
338 || TREE_CODE (exp) == TARGET_MEM_REF)
339 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
342 else if (TREE_CODE (exp) == STRING_CST)
344 /* STRING_CST are the only constant objects we allow to be not
345 wrapped inside a CONST_DECL. */
346 align = TYPE_ALIGN (TREE_TYPE (exp));
347 if (CONSTANT_CLASS_P (exp))
348 align = targetm.constant_alignment (exp, align);
350 known_alignment = true;
353 /* If there is a non-constant offset part extract the maximum
354 alignment that can prevail. */
355 if (offset)
357 unsigned int trailing_zeros = tree_ctz (offset);
358 if (trailing_zeros < HOST_BITS_PER_INT)
360 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
361 if (inner)
362 align = MIN (align, inner);
366 /* Account for the alignment of runtime coefficients, so that the constant
367 bitpos is guaranteed to be accurate. */
368 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
369 if (alt_align != 0 && alt_align < align)
371 align = alt_align;
372 known_alignment = false;
375 *alignp = align;
376 *bitposp = bitpos.coeffs[0] & (align - 1);
377 return known_alignment;
380 /* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
382 store M in alignp and N in *BITPOSP and return true. Otherwise return false
383 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
385 bool
386 get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
389 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
390 with it. */
391 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
392 exp = TREE_OPERAND (exp, 0);
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
396 /* Return the alignment in bits of EXP, an object. */
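/* Illustrative numbers (a sketch, assuming BITS_PER_UNIT == 8): if
   get_object_alignment_1 reports align == 128 bits and bitpos == 24,
   the value returned here is least_bit_hwi (24) == 8, i.e. only byte
   alignment remains once the constant misalignment is folded in.  */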
398 unsigned int
399 get_object_alignment (tree exp)
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
404 get_object_alignment_1 (exp, &align, &bitpos);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
409 if (bitpos != 0)
410 align = least_bit_hwi (bitpos);
411 return align;
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in alignp and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
419 If EXP is not a pointer, false is returned too. */
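/* Illustrative sketch (assuming BITS_PER_UNIT == 8): for an SSA pointer
   whose ptr_info records an alignment of 16 bytes with misalignment 4,
   this stores *ALIGNP = 128 and *BITPOSP = 32 but returns false, since
   the recorded alignment may be a conservative approximation.  */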
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
425 STRIP_NOPS (exp);
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
466 /* We cannot really tell whether this result is an approximation. */
467 return false;
469 else
471 *bitposp = 0;
472 *alignp = BITS_PER_UNIT;
473 return false;
476 else if (TREE_CODE (exp) == INTEGER_CST)
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
484 *bitposp = 0;
485 *alignp = BITS_PER_UNIT;
486 return false;
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
496 unsigned int
497 get_pointer_alignment (tree exp)
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
502 get_pointer_alignment_1 (exp, &align, &bitpos);
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
507 if (bitpos != 0)
508 align = least_bit_hwi (bitpos);
510 return align;
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
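/* A small worked example (illustrative only): with PTR pointing at the
   bytes "ab\0cd", ELTSIZE == 1 and MAXELTS == 5 the function returns 2,
   while with MAXELTS == 1 the scan stops at the limit and returns 1.
   For ELTSIZE == 2 and MAXELTS == 2, a wide string laid out as the
   bytes "a\0\0\0" yields 1.  */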
517 unsigned
518 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522 unsigned n;
524 if (eltsize == 1)
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
534 else
536 for (n = 0; n < maxelts; n++)
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
543 return n;
546 /* Compute the length of a null-terminated character string or wide
547 character string handling character sizes of 1, 2, and 4 bytes.
548 TREE_STRING_LENGTH is not the right way because it evaluates to
549 the size of the character array in bytes (as opposed to characters)
550 and because it can contain a zero byte in the middle.
552 ONLY_VALUE should be nonzero if the result is not going to be emitted
553 into the instruction stream and zero if it is going to be expanded.
554 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
555 is returned, otherwise NULL, since
556 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
557 evaluate the side-effects.
559 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
560 accesses. Note that this implies the result is not going to be emitted
561 into the instruction stream.
563 Additional information about the string accessed may be recorded
564 in DATA. For example, if ARG references an unterminated string,
565 then the declaration will be stored in the DECL field. If the
566 length of the unterminated string can be determined, it'll be
567 stored in the LEN field. Note this length could well be different
568 than what a C strlen call would return.
570 ELTSIZE is 1 for normal single byte character strings, and 2 or
571 4 for wide character strings. ELTSIZE is by default 1.
573 The value returned is of type `ssizetype'. */
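/* Sketch of the behaviour described above (not from the source): for
   ARG pointing to the string constant "hello" the result is
   ssize_int (5).  For "foo\0bar" with a non-constant byte offset the
   function returns NULL_TREE, because the distance to the first NUL
   depends on where the offset lands.  */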
575 tree
576 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
578 /* If we were not passed a DATA pointer, then get one to a local
579 structure. That avoids having to check DATA for NULL before
580 each time we want to use it. */
581 c_strlen_data local_strlen_data = { };
582 if (!data)
583 data = &local_strlen_data;
585 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
587 tree src = STRIP_NOPS (arg);
588 if (TREE_CODE (src) == COND_EXPR
589 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 tree len1, len2;
593 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
594 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
595 if (tree_int_cst_equal (len1, len2))
596 return len1;
599 if (TREE_CODE (src) == COMPOUND_EXPR
600 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
601 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
603 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
605 /* Offset from the beginning of the string in bytes. */
606 tree byteoff;
607 tree memsize;
608 tree decl;
609 src = string_constant (src, &byteoff, &memsize, &decl);
610 if (src == 0)
611 return NULL_TREE;
613 /* Determine the size of the string element. */
614 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
615 return NULL_TREE;
617 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
618 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
619 in case the latter is less than the size of the array, such as when
620 SRC refers to a short string literal used to initialize a large array.
621 In that case, the elements of the array after the terminating NUL are
622 all NUL. */
623 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
624 strelts = strelts / eltsize;
626 if (!tree_fits_uhwi_p (memsize))
627 return NULL_TREE;
629 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
631 /* PTR can point to the byte representation of any string type, including
632 char* and wchar_t*. */
633 const char *ptr = TREE_STRING_POINTER (src);
635 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
637 /* The code below works only for single byte character types. */
638 if (eltsize != 1)
639 return NULL_TREE;
641 /* If the string has an internal NUL character followed by any
642 non-NUL characters (e.g., "foo\0bar"), we can't compute
643 the offset to the following NUL if we don't know where to
644 start searching for it. */
645 unsigned len = string_length (ptr, eltsize, strelts);
647 /* Return when an embedded null character is found or none at all.
648 In the latter case, set the DECL/LEN field in the DATA structure
649 so that callers may examine them. */
650 if (len + 1 < strelts)
651 return NULL_TREE;
652 else if (len >= maxelts)
654 data->decl = decl;
655 data->off = byteoff;
656 data->minlen = ssize_int (len);
657 return NULL_TREE;
660 /* For empty strings the result should be zero. */
661 if (len == 0)
662 return ssize_int (0);
664 /* We don't know the starting offset, but we do know that the string
665 has no internal zero bytes. If the offset falls within the bounds
666 of the string subtract the offset from the length of the string,
667 and return that. Otherwise the length is zero. Take care to
668 use SAVE_EXPR in case the OFFSET has side-effects. */
669 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
670 : byteoff;
671 offsave = fold_convert_loc (loc, sizetype, offsave);
672 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
673 size_int (len));
674 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
675 offsave);
676 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
677 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
678 build_zero_cst (ssizetype));
681 /* Offset from the beginning of the string in elements. */
682 HOST_WIDE_INT eltoff;
684 /* We have a known offset into the string. Start searching there for
685 a null character if we can represent it as a single HOST_WIDE_INT. */
686 if (byteoff == 0)
687 eltoff = 0;
688 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
689 eltoff = -1;
690 else
691 eltoff = tree_to_uhwi (byteoff) / eltsize;
693 /* If the offset is known to be out of bounds, warn, and call strlen at
694 runtime. */
695 if (eltoff < 0 || eltoff >= maxelts)
697 /* Suppress multiple warnings for propagated constant strings. */
698 if (only_value != 2
699 && !warning_suppressed_p (arg, OPT_Warray_bounds_)
700 && warning_at (loc, OPT_Warray_bounds_,
701 "offset %qwi outside bounds of constant string",
702 eltoff))
704 if (decl)
705 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
706 suppress_warning (arg, OPT_Warray_bounds_);
708 return NULL_TREE;
711 /* If eltoff is larger than strelts but less than maxelts the
712 string length is zero, since the excess memory will be zero. */
713 if (eltoff > strelts)
714 return ssize_int (0);
716 /* Use strlen to search for the first zero byte. Since any strings
717 constructed with build_string will have nulls appended, we win even
718 if we get handed something like (char[4])"abcd".
720 Since ELTOFF is our starting index into the string, no further
721 calculation is needed. */
722 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
723 strelts - eltoff);
725 /* Don't know what to return if there was no zero termination.
726 Ideally this would turn into a gcc_checking_assert over time.
727 Set DECL/LEN so callers can examine them. */
728 if (len >= maxelts - eltoff)
730 data->decl = decl;
731 data->off = byteoff;
732 data->minlen = ssize_int (len);
733 return NULL_TREE;
736 return ssize_int (len);
739 /* Return a constant integer corresponding to target reading
740 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
741 NULL_TERMINATED_P, reading stops after the '\0' character, all further ones
742 are assumed to be zero; otherwise it reads as many characters
743 as needed. */
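/* Worked example (illustrative, assuming an 8-bit byte): c_readstr ("ab",
   SImode) reads the bytes 'a', 'b', '\0', '\0' and yields the constant
   0x00006261 on a little-endian target or 0x61620000 on a big-endian
   one, since NULL_TERMINATED_P defaults to true and bytes past the NUL
   read as zero.  */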
745 rtx
746 c_readstr (const char *str, scalar_int_mode mode,
747 bool null_terminated_p/*=true*/)
749 HOST_WIDE_INT ch;
750 unsigned int i, j;
751 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
753 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
754 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
755 / HOST_BITS_PER_WIDE_INT;
757 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
758 for (i = 0; i < len; i++)
759 tmp[i] = 0;
761 ch = 1;
762 for (i = 0; i < GET_MODE_SIZE (mode); i++)
764 j = i;
765 if (WORDS_BIG_ENDIAN)
766 j = GET_MODE_SIZE (mode) - i - 1;
767 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
768 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
769 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
770 j *= BITS_PER_UNIT;
772 if (ch || !null_terminated_p)
773 ch = (unsigned char) str[i];
774 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
777 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
778 return immed_wide_int_const (c, mode);
781 /* Cast a target constant CST to target CHAR and if that value fits into
782 host char type, return zero and put that value into variable pointed to by
783 P. */
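/* For instance (illustrative): for CST equal to 'A' on a target with an
   8-bit char this stores 65 in *P and returns 0; it returns 1 only when
   CST is not an INTEGER_CST or when the target character value cannot
   be represented in a host char, e.g. a 16-bit target char holding
   0x141.  */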
785 static int
786 target_char_cast (tree cst, char *p)
788 unsigned HOST_WIDE_INT val, hostval;
790 if (TREE_CODE (cst) != INTEGER_CST
791 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
792 return 1;
794 /* Do not care if it fits or not right here. */
795 val = TREE_INT_CST_LOW (cst);
797 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
798 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
800 hostval = val;
801 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
802 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
804 if (val != hostval)
805 return 1;
807 *p = hostval;
808 return 0;
811 /* Similar to save_expr, but assumes that arbitrary code is not executed
812 in between the multiple evaluations. In particular, we assume that a
813 non-addressable local variable will not be modified. */
815 static tree
816 builtin_save_expr (tree exp)
818 if (TREE_CODE (exp) == SSA_NAME
819 || (TREE_ADDRESSABLE (exp) == 0
820 && (TREE_CODE (exp) == PARM_DECL
821 || (VAR_P (exp) && !TREE_STATIC (exp)))))
822 return exp;
824 return save_expr (exp);
827 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
828 times to get the address of either a higher stack frame, or a return
829 address located within it (depending on FNDECL_CODE). */
831 static rtx
832 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
834 int i;
835 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
836 if (tem == NULL_RTX)
838 /* For a zero count with __builtin_return_address, we don't care what
839 frame address we return, because target-specific definitions will
840 override us. Therefore frame pointer elimination is OK, and using
841 the soft frame pointer is OK.
843 For a nonzero count, or a zero count with __builtin_frame_address,
844 we require a stable offset from the current frame pointer to the
845 previous one, so we must use the hard frame pointer, and
846 we must disable frame pointer elimination. */
847 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
848 tem = frame_pointer_rtx;
849 else
851 tem = hard_frame_pointer_rtx;
853 /* Tell reload not to eliminate the frame pointer. */
854 crtl->accesses_prior_frames = 1;
858 if (count > 0)
859 SETUP_FRAME_ADDRESSES ();
861 /* On the SPARC, the return address is not in the frame, it is in a
862 register. There is no way to access it off of the current frame
863 pointer, but it can be accessed off the previous frame pointer by
864 reading the value from the register window save area. */
865 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
866 count--;
868 /* Scan back COUNT frames to the specified frame. */
869 for (i = 0; i < count; i++)
871 /* Assume the dynamic chain pointer is in the word that the
872 frame address points to, unless otherwise specified. */
873 tem = DYNAMIC_CHAIN_ADDRESS (tem);
874 tem = memory_address (Pmode, tem);
875 tem = gen_frame_mem (Pmode, tem);
876 tem = copy_to_reg (tem);
879 /* For __builtin_frame_address, return what we've got. But, on
880 the SPARC for example, we may have to add a bias. */
881 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
882 return FRAME_ADDR_RTX (tem);
884 /* For __builtin_return_address, get the return address from that frame. */
885 #ifdef RETURN_ADDR_RTX
886 tem = RETURN_ADDR_RTX (count, tem);
887 #else
888 tem = memory_address (Pmode,
889 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
890 tem = gen_frame_mem (Pmode, tem);
891 #endif
892 return tem;
895 /* Alias set used for setjmp buffer. */
896 static alias_set_type setjmp_alias_set = -1;
898 /* Construct the leading half of a __builtin_setjmp call. Control will
899 return to RECEIVER_LABEL. This is also called directly by the SJLJ
900 exception handling code. */
902 void
903 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
905 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
906 rtx stack_save;
907 rtx mem;
909 if (setjmp_alias_set == -1)
910 setjmp_alias_set = new_alias_set ();
912 buf_addr = convert_memory_address (Pmode, buf_addr);
914 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
916 /* We store the frame pointer and the address of receiver_label in
917 the buffer and use the rest of it for the stack save area, which
918 is machine-dependent. */
920 mem = gen_rtx_MEM (Pmode, buf_addr);
921 set_mem_alias_set (mem, setjmp_alias_set);
922 emit_move_insn (mem, hard_frame_pointer_rtx);
924 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
925 GET_MODE_SIZE (Pmode))),
926 set_mem_alias_set (mem, setjmp_alias_set);
928 emit_move_insn (validize_mem (mem),
929 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
931 stack_save = gen_rtx_MEM (sa_mode,
932 plus_constant (Pmode, buf_addr,
933 2 * GET_MODE_SIZE (Pmode)));
934 set_mem_alias_set (stack_save, setjmp_alias_set);
935 emit_stack_save (SAVE_NONLOCAL, &stack_save);
937 /* If there is further processing to do, do it. */
938 if (targetm.have_builtin_setjmp_setup ())
939 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
941 /* We have a nonlocal label. */
942 cfun->has_nonlocal_label = 1;
945 /* Construct the trailing part of a __builtin_setjmp call. This is
946 also called directly by the SJLJ exception handling code.
947 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
949 void
950 expand_builtin_setjmp_receiver (rtx receiver_label)
952 rtx chain;
954 /* Mark the FP as used when we get here, so we have to make sure it's
955 marked as used by this function. */
956 emit_use (hard_frame_pointer_rtx);
958 /* Mark the static chain as clobbered here so life information
959 doesn't get messed up for it. */
960 chain = rtx_for_static_chain (current_function_decl, true);
961 if (chain && REG_P (chain))
962 emit_clobber (chain);
964 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
966 /* If the argument pointer can be eliminated in favor of the
967 frame pointer, we don't need to restore it. We assume here
968 that if such an elimination is present, it can always be used.
969 This is the case on all known machines; if we don't make this
970 assumption, we do unnecessary saving on many machines. */
971 size_t i;
972 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
974 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
975 if (elim_regs[i].from == ARG_POINTER_REGNUM
976 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
977 break;
979 if (i == ARRAY_SIZE (elim_regs))
981 /* Now restore our arg pointer from the address at which it
982 was saved in our stack frame. */
983 emit_move_insn (crtl->args.internal_arg_pointer,
984 copy_to_reg (get_arg_pointer_save_area ()));
988 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
989 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
990 else if (targetm.have_nonlocal_goto_receiver ())
991 emit_insn (targetm.gen_nonlocal_goto_receiver ());
992 else
993 { /* Nothing */ }
995 /* We must not allow the code we just generated to be reordered by
996 scheduling. Specifically, the update of the frame pointer must
997 happen immediately, not later. */
998 emit_insn (gen_blockage ());
1001 /* __builtin_longjmp is passed a pointer to an array of five words (not
1002 all will be used on all machines). It operates similarly to the C
1003 library function of the same name, but is more efficient. Much of
1004 the code below is copied from the handling of non-local gotos. */
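/* Rough source-level picture of the pair being expanded here (a sketch,
   not part of this file): the buffer is an array of five words and the
   second argument to __builtin_longjmp must be the constant 1, which is
   what __builtin_setjmp returns on the longjmp path:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);   // resumes at the setjmp, yielding 1
 */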
1006 static void
1007 expand_builtin_longjmp (rtx buf_addr, rtx value)
1009 rtx fp, lab, stack;
1010 rtx_insn *insn, *last;
1011 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1013 /* DRAP is needed for stack realign if longjmp is expanded to current
1014 function */
1015 if (SUPPORTS_STACK_ALIGNMENT)
1016 crtl->need_drap = true;
1018 if (setjmp_alias_set == -1)
1019 setjmp_alias_set = new_alias_set ();
1021 buf_addr = convert_memory_address (Pmode, buf_addr);
1023 buf_addr = force_reg (Pmode, buf_addr);
1025 /* We require that the user must pass a second argument of 1, because
1026 that is what builtin_setjmp will return. */
1027 gcc_assert (value == const1_rtx);
1029 last = get_last_insn ();
1030 if (targetm.have_builtin_longjmp ())
1031 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1032 else
1034 fp = gen_rtx_MEM (Pmode, buf_addr);
1035 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1036 GET_MODE_SIZE (Pmode)));
1038 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1039 2 * GET_MODE_SIZE (Pmode)));
1040 set_mem_alias_set (fp, setjmp_alias_set);
1041 set_mem_alias_set (lab, setjmp_alias_set);
1042 set_mem_alias_set (stack, setjmp_alias_set);
1044 /* Pick up FP, label, and SP from the block and jump. This code is
1045 from expand_goto in stmt.cc; see there for detailed comments. */
1046 if (targetm.have_nonlocal_goto ())
1047 /* We have to pass a value to the nonlocal_goto pattern that will
1048 get copied into the static_chain pointer, but it does not matter
1049 what that value is, because builtin_setjmp does not use it. */
1050 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1051 else
1053 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1054 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1056 lab = copy_to_reg (lab);
1058 /* Restore the frame pointer and stack pointer. We must use a
1059 temporary since the setjmp buffer may be a local. */
1060 fp = copy_to_reg (fp);
1061 emit_stack_restore (SAVE_NONLOCAL, stack);
1063 /* Ensure the frame pointer move is not optimized. */
1064 emit_insn (gen_blockage ());
1065 emit_clobber (hard_frame_pointer_rtx);
1066 emit_clobber (frame_pointer_rtx);
1067 emit_move_insn (hard_frame_pointer_rtx, fp);
1069 emit_use (hard_frame_pointer_rtx);
1070 emit_use (stack_pointer_rtx);
1071 emit_indirect_jump (lab);
1075 /* Search backwards and mark the jump insn as a non-local goto.
1076 Note that this precludes the use of __builtin_longjmp to a
1077 __builtin_setjmp target in the same function. However, we've
1078 already cautioned the user that these functions are for
1079 internal exception handling use only. */
1080 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1082 gcc_assert (insn != last);
1084 if (JUMP_P (insn))
1086 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1087 break;
1089 else if (CALL_P (insn))
1090 break;
1094 static inline bool
1095 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1097 return (iter->i < iter->n);
1100 /* This function validates the types of a function call argument list
1101 against a specified list of tree_codes. If the last specifier is a 0,
1102 that represents an ellipsis, otherwise the last specifier must be a
1103 VOID_TYPE. */
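/* Example of use (taken from expand_builtin_nonlocal_goto below):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while ending the list with 0
   instead of VOID_TYPE would allow arbitrary trailing arguments.  */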
1105 static bool
1106 validate_arglist (const_tree callexpr, ...)
1108 enum tree_code code;
1109 bool res = 0;
1110 va_list ap;
1111 const_call_expr_arg_iterator iter;
1112 const_tree arg;
1114 va_start (ap, callexpr);
1115 init_const_call_expr_arg_iterator (callexpr, &iter);
1117 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1118 tree fn = CALL_EXPR_FN (callexpr);
1119 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1121 for (unsigned argno = 1; ; ++argno)
1123 code = (enum tree_code) va_arg (ap, int);
1125 switch (code)
1127 case 0:
1128 /* This signifies an ellipsis; any further arguments are all ok. */
1129 res = true;
1130 goto end;
1131 case VOID_TYPE:
1132 /* This signifies an endlink, if no arguments remain, return
1133 true, otherwise return false. */
1134 res = !more_const_call_expr_args_p (&iter);
1135 goto end;
1136 case POINTER_TYPE:
1137 /* The actual argument must be nonnull when either the whole
1138 called function has been declared nonnull, or when the formal
1139 argument corresponding to the actual argument has been. */
1140 if (argmap
1141 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1143 arg = next_const_call_expr_arg (&iter);
1144 if (!validate_arg (arg, code) || integer_zerop (arg))
1145 goto end;
1146 break;
1148 /* FALLTHRU */
1149 default:
1150 /* If no parameters remain or the parameter's code does not
1151 match the specified code, return false. Otherwise continue
1152 checking any remaining arguments. */
1153 arg = next_const_call_expr_arg (&iter);
1154 if (!validate_arg (arg, code))
1155 goto end;
1156 break;
1160 /* We need gotos here since we can only have one VA_CLOSE in a
1161 function. */
1162 end: ;
1163 va_end (ap);
1165 BITMAP_FREE (argmap);
1167 return res;
1170 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1171 and the address of the save area. */
1173 static rtx
1174 expand_builtin_nonlocal_goto (tree exp)
1176 tree t_label, t_save_area;
1177 rtx r_label, r_save_area, r_fp, r_sp;
1178 rtx_insn *insn;
1180 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1181 return NULL_RTX;
1183 t_label = CALL_EXPR_ARG (exp, 0);
1184 t_save_area = CALL_EXPR_ARG (exp, 1);
1186 r_label = expand_normal (t_label);
1187 r_label = convert_memory_address (Pmode, r_label);
1188 r_save_area = expand_normal (t_save_area);
1189 r_save_area = convert_memory_address (Pmode, r_save_area);
1190 /* Copy the address of the save location to a register just in case it was
1191 based on the frame pointer. */
1192 r_save_area = copy_to_reg (r_save_area);
1193 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1194 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1195 plus_constant (Pmode, r_save_area,
1196 GET_MODE_SIZE (Pmode)));
1198 crtl->has_nonlocal_goto = 1;
1200 /* ??? We no longer need to pass the static chain value, afaik. */
1201 if (targetm.have_nonlocal_goto ())
1202 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1203 else
1205 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1206 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1208 r_label = copy_to_reg (r_label);
1210 /* Restore the frame pointer and stack pointer. We must use a
1211 temporary since the setjmp buffer may be a local. */
1212 r_fp = copy_to_reg (r_fp);
1213 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1215 /* Ensure the frame pointer move is not optimized. */
1216 emit_insn (gen_blockage ());
1217 emit_clobber (hard_frame_pointer_rtx);
1218 emit_clobber (frame_pointer_rtx);
1219 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1221 /* USE of hard_frame_pointer_rtx added for consistency;
1222 not clear if really needed. */
1223 emit_use (hard_frame_pointer_rtx);
1224 emit_use (stack_pointer_rtx);
1226 /* If the architecture is using a GP register, we must
1227 conservatively assume that the target function makes use of it.
1228 The prologue of functions with nonlocal gotos must therefore
1229 initialize the GP register to the appropriate value, and we
1230 must then make sure that this value is live at the point
1231 of the jump. (Note that this doesn't necessarily apply
1232 to targets with a nonlocal_goto pattern; they are free
1233 to implement it in their own way. Note also that this is
1234 a no-op if the GP register is a global invariant.) */
1235 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1236 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1237 emit_use (pic_offset_table_rtx);
1239 emit_indirect_jump (r_label);
1242 /* Search backwards to the jump insn and mark it as a
1243 non-local goto. */
1244 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1246 if (JUMP_P (insn))
1248 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1249 break;
1251 else if (CALL_P (insn))
1252 break;
1255 return const0_rtx;
1258 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1259 (not all will be used on all machines) that was passed to __builtin_setjmp.
1260 It updates the stack pointer in that block to the current value. This is
1261 also called directly by the SJLJ exception handling code. */
1263 void
1264 expand_builtin_update_setjmp_buf (rtx buf_addr)
1266 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1267 buf_addr = convert_memory_address (Pmode, buf_addr);
1268 rtx stack_save
1269 = gen_rtx_MEM (sa_mode,
1270 memory_address
1271 (sa_mode,
1272 plus_constant (Pmode, buf_addr,
1273 2 * GET_MODE_SIZE (Pmode))));
1275 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1278 /* Expand a call to __builtin_prefetch. For a target that does not support
1279 data prefetch, evaluate the memory address argument in case it has side
1280 effects. */
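/* For reference (illustrative): a call such as

     __builtin_prefetch (p, 1, 3);

   asks to prefetch *P for writing with maximal temporal locality; when
   omitted, the read/write argument defaults to 0 and the locality
   argument to 3, matching the defaults applied below.  */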
1282 static void
1283 expand_builtin_prefetch (tree exp)
1285 tree arg0, arg1, arg2;
1286 int nargs;
1287 rtx op0, op1, op2;
1289 if (!validate_arglist (exp, POINTER_TYPE, 0))
1290 return;
1292 arg0 = CALL_EXPR_ARG (exp, 0);
1294 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1295 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1296 locality). */
1297 nargs = call_expr_nargs (exp);
1298 if (nargs > 1)
1299 arg1 = CALL_EXPR_ARG (exp, 1);
1300 else
1301 arg1 = integer_zero_node;
1302 if (nargs > 2)
1303 arg2 = CALL_EXPR_ARG (exp, 2);
1304 else
1305 arg2 = integer_three_node;
1307 /* Argument 0 is an address. */
1308 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1310 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1311 if (TREE_CODE (arg1) != INTEGER_CST)
1313 error ("second argument to %<__builtin_prefetch%> must be a constant");
1314 arg1 = integer_zero_node;
1316 op1 = expand_normal (arg1);
1317 /* Argument 1 must be either zero or one. */
1318 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1320 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1321 " using zero");
1322 op1 = const0_rtx;
1325 /* Argument 2 (locality) must be a compile-time constant int. */
1326 if (TREE_CODE (arg2) != INTEGER_CST)
1328 error ("third argument to %<__builtin_prefetch%> must be a constant");
1329 arg2 = integer_zero_node;
1331 op2 = expand_normal (arg2);
1332 /* Argument 2 must be 0, 1, 2, or 3. */
1333 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1335 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1336 op2 = const0_rtx;
1339 if (targetm.have_prefetch ())
1341 class expand_operand ops[3];
1343 create_address_operand (&ops[0], op0);
1344 create_integer_operand (&ops[1], INTVAL (op1));
1345 create_integer_operand (&ops[2], INTVAL (op2));
1346 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1347 return;
1350 /* Don't do anything with direct references to volatile memory, but
1351 generate code to handle other side effects. */
1352 if (!MEM_P (op0) && side_effects_p (op0))
1353 emit_insn (op0);
1356 /* Get a MEM rtx for expression EXP which is the address of an operand
1357 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1358 the maximum length of the block of memory that might be accessed or
1359 NULL if unknown. */
1361 static rtx
1362 get_memory_rtx (tree exp, tree len)
1364 tree orig_exp = exp, base;
1365 rtx addr, mem;
1367 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1368 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1369 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1370 exp = TREE_OPERAND (exp, 0);
1372 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1373 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1375 /* Get an expression we can use to find the attributes to assign to MEM.
1376 First remove any nops. */
1377 while (CONVERT_EXPR_P (exp)
1378 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1379 exp = TREE_OPERAND (exp, 0);
1381 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1382 (as builtin stringops may alias with anything). */
1383 exp = fold_build2 (MEM_REF,
1384 build_array_type (char_type_node,
1385 build_range_type (sizetype,
1386 size_one_node, len)),
1387 exp, build_int_cst (ptr_type_node, 0));
1389 /* If the MEM_REF has no acceptable address, try to get the base object
1390 from the original address we got, and build an all-aliasing
1391 unknown-sized access to that one. */
1392 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1393 set_mem_attributes (mem, exp, 0);
1394 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1395 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1396 0))))
1398 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1399 exp = build_fold_addr_expr (base);
1400 exp = fold_build2 (MEM_REF,
1401 build_array_type (char_type_node,
1402 build_range_type (sizetype,
1403 size_zero_node,
1404 NULL)),
1405 exp, build_int_cst (ptr_type_node, 0));
1406 set_mem_attributes (mem, exp, 0);
1407 /* Since we stripped parts make sure the offset is unknown and the
1408 alignment is computed from the original address. */
1409 clear_mem_offset (mem);
1410 set_mem_align (mem, align);
1412 set_mem_alias_set (mem, 0);
1413 return mem;
1416 /* Built-in functions to perform an untyped call and return. */
1418 #define apply_args_mode \
1419 (this_target_builtins->x_apply_args_mode)
1420 #define apply_result_mode \
1421 (this_target_builtins->x_apply_result_mode)
1423 /* Return the size required for the block returned by __builtin_apply_args,
1424 and initialize apply_args_mode. */
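/* Layout sketch (illustrative, not target-specific): on a hypothetical
   target where Pmode is 8 bytes, the structure value address is not
   passed as an invisible first argument, and two 8-byte integer
   argument registers exist, the block would be 8 (arg pointer) + 8
   (struct value address) + 8 + 8 = 32 bytes, with each register slot
   padded to its mode's alignment as computed below.  */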
1426 static int
1427 apply_args_size (void)
1429 static int size = -1;
1430 int align;
1431 unsigned int regno;
1433 /* The values computed by this function never change. */
1434 if (size < 0)
1436 /* The first value is the incoming arg-pointer. */
1437 size = GET_MODE_SIZE (Pmode);
1439 /* The second value is the structure value address unless this is
1440 passed as an "invisible" first argument. */
1441 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1442 size += GET_MODE_SIZE (Pmode);
1444 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1445 if (FUNCTION_ARG_REGNO_P (regno))
1447 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1449 if (mode != VOIDmode)
1451 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1452 if (size % align != 0)
1453 size = CEIL (size, align) * align;
1454 size += GET_MODE_SIZE (mode);
1455 apply_args_mode[regno] = mode;
1457 else
1458 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1460 else
1461 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1463 return size;
1466 /* Return the size required for the block returned by __builtin_apply,
1467 and initialize apply_result_mode. */
1469 static int
1470 apply_result_size (void)
1472 static int size = -1;
1473 int align, regno;
1475 /* The values computed by this function never change. */
1476 if (size < 0)
1478 size = 0;
1480 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1481 if (targetm.calls.function_value_regno_p (regno))
1483 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1485 if (mode != VOIDmode)
1487 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1488 if (size % align != 0)
1489 size = CEIL (size, align) * align;
1490 size += GET_MODE_SIZE (mode);
1491 apply_result_mode[regno] = mode;
1493 else
1494 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1496 else
1497 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1499 /* Allow targets that use untyped_call and untyped_return to override
1500 the size so that machine-specific information can be stored here. */
1501 #ifdef APPLY_RESULT_SIZE
1502 size = APPLY_RESULT_SIZE;
1503 #endif
1505 return size;
1508 /* Create a vector describing the result block RESULT. If SAVEP is true,
1509 the result block is used to save the values; otherwise it is used to
1510 restore the values. */
1512 static rtx
1513 result_vector (int savep, rtx result)
1515 int regno, size, align, nelts;
1516 fixed_size_mode mode;
1517 rtx reg, mem;
1518 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1520 size = nelts = 0;
1521 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1522 if ((mode = apply_result_mode[regno]) != VOIDmode)
1524 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1525 if (size % align != 0)
1526 size = CEIL (size, align) * align;
1527 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1528 mem = adjust_address (result, mode, size);
1529 savevec[nelts++] = (savep
1530 ? gen_rtx_SET (mem, reg)
1531 : gen_rtx_SET (reg, mem));
1532 size += GET_MODE_SIZE (mode);
1534 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1537 /* Save the state required to perform an untyped call with the same
1538 arguments as were passed to the current function. */
1540 static rtx
1541 expand_builtin_apply_args_1 (void)
1543 rtx registers, tem;
1544 int size, align, regno;
1545 fixed_size_mode mode;
1546 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1548 /* Create a block where the arg-pointer, structure value address,
1549 and argument registers can be saved. */
1550 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1552 /* Walk past the arg-pointer and structure value address. */
1553 size = GET_MODE_SIZE (Pmode);
1554 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1555 size += GET_MODE_SIZE (Pmode);
1557 /* Save each register used in calling a function to the block. */
1558 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1559 if ((mode = apply_args_mode[regno]) != VOIDmode)
1561 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1562 if (size % align != 0)
1563 size = CEIL (size, align) * align;
1565 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1567 emit_move_insn (adjust_address (registers, mode, size), tem);
1568 size += GET_MODE_SIZE (mode);
1571 /* Save the arg pointer to the block. */
1572 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1573 /* We need the pointer as the caller actually passed them to us, not
1574 as we might have pretended they were passed. Make sure it's a valid
1575 operand, as emit_move_insn isn't expected to handle a PLUS. */
1576 if (STACK_GROWS_DOWNWARD)
1577 tem
1578 = force_operand (plus_constant (Pmode, tem,
1579 crtl->args.pretend_args_size),
1580 NULL_RTX);
1581 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1583 size = GET_MODE_SIZE (Pmode);
1585 /* Save the structure value address unless this is passed as an
1586 "invisible" first argument. */
1587 if (struct_incoming_value)
1588 emit_move_insn (adjust_address (registers, Pmode, size),
1589 copy_to_reg (struct_incoming_value));
1591 /* Return the address of the block. */
1592 return copy_addr_to_reg (XEXP (registers, 0));
1595 /* __builtin_apply_args returns block of memory allocated on
1596 the stack into which is stored the arg pointer, structure
1597 value address, static chain, and all the registers that might
1598 possibly be used in performing a function call. The code is
1599 moved to the start of the function so the incoming values are
1600 saved. */
1602 static rtx
1603 expand_builtin_apply_args (void)
1605 /* Don't do __builtin_apply_args more than once in a function.
1606 Save the result of the first call and reuse it. */
1607 if (apply_args_value != 0)
1608 return apply_args_value;
1610 /* When this function is called, it means that registers must be
1611 saved on entry to this function. So we migrate the
1612 call to the first insn of this function. */
1613 rtx temp;
1615 start_sequence ();
1616 temp = expand_builtin_apply_args_1 ();
1617 rtx_insn *seq = get_insns ();
1618 end_sequence ();
1620 apply_args_value = temp;
1622 /* Put the insns after the NOTE that starts the function.
1623 If this is inside a start_sequence, make the outer-level insn
1624 chain current, so the code is placed at the start of the
1625 function. If internal_arg_pointer is a non-virtual pseudo,
1626 it needs to be placed after the function that initializes
1627 that pseudo. */
1628 push_topmost_sequence ();
1629 if (REG_P (crtl->args.internal_arg_pointer)
1630 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1631 emit_insn_before (seq, parm_birth_insn);
1632 else
1633 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1634 pop_topmost_sequence ();
1635 return temp;
1639 /* Perform an untyped call and save the state required to perform an
1640 untyped return of whatever value was returned by the given function. */
1642 static rtx
1643 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1645 int size, align, regno;
1646 fixed_size_mode mode;
1647 rtx incoming_args, result, reg, dest, src;
1648 rtx_call_insn *call_insn;
1649 rtx old_stack_level = 0;
1650 rtx call_fusage = 0;
1651 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1653 arguments = convert_memory_address (Pmode, arguments);
1655 /* Create a block where the return registers can be saved. */
1656 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1658 /* Fetch the arg pointer from the ARGUMENTS block. */
1659 incoming_args = gen_reg_rtx (Pmode);
1660 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1661 if (!STACK_GROWS_DOWNWARD)
1662 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1663 incoming_args, 0, OPTAB_LIB_WIDEN);
1665 /* Push a new argument block and copy the arguments. Do not allow
1666 the (potential) memcpy call below to interfere with our stack
1667 manipulations. */
1668 do_pending_stack_adjust ();
1669 NO_DEFER_POP;
1671 /* Save the stack with nonlocal if available. */
1672 if (targetm.have_save_stack_nonlocal ())
1673 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1674 else
1675 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1677 /* Allocate a block of memory onto the stack and copy the memory
1678 arguments to the outgoing arguments address. We can pass TRUE
1679 as the 4th argument because we just saved the stack pointer
1680 and will restore it right after the call. */
1681 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1683 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1684 may have already set current_function_calls_alloca to true.
1685 current_function_calls_alloca won't be set if argsize is zero,
1686 so we have to guarantee need_drap is true here. */
1687 if (SUPPORTS_STACK_ALIGNMENT)
1688 crtl->need_drap = true;
1690 dest = virtual_outgoing_args_rtx;
1691 if (!STACK_GROWS_DOWNWARD)
1693 if (CONST_INT_P (argsize))
1694 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1695 else
1696 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1698 dest = gen_rtx_MEM (BLKmode, dest);
1699 set_mem_align (dest, PARM_BOUNDARY);
1700 src = gen_rtx_MEM (BLKmode, incoming_args);
1701 set_mem_align (src, PARM_BOUNDARY);
1702 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1704 /* Refer to the argument block. */
1705 apply_args_size ();
1706 arguments = gen_rtx_MEM (BLKmode, arguments);
1707 set_mem_align (arguments, PARM_BOUNDARY);
1709 /* Walk past the arg-pointer and structure value address. */
1710 size = GET_MODE_SIZE (Pmode);
1711 if (struct_value)
1712 size += GET_MODE_SIZE (Pmode);
1714 /* Restore each of the registers previously saved. Make USE insns
1715 for each of these registers for use in making the call. */
1716 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1717 if ((mode = apply_args_mode[regno]) != VOIDmode)
1719 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1720 if (size % align != 0)
1721 size = CEIL (size, align) * align;
1722 reg = gen_rtx_REG (mode, regno);
1723 emit_move_insn (reg, adjust_address (arguments, mode, size));
1724 use_reg (&call_fusage, reg);
1725 size += GET_MODE_SIZE (mode);
1728 /* Restore the structure value address unless this is passed as an
1729 "invisible" first argument. */
1730 size = GET_MODE_SIZE (Pmode);
1731 if (struct_value)
1733 rtx value = gen_reg_rtx (Pmode);
1734 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1735 emit_move_insn (struct_value, value);
1736 if (REG_P (struct_value))
1737 use_reg (&call_fusage, struct_value);
1740 /* All arguments and registers used for the call are set up by now! */
1741 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1743 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1744 and we don't want to load it into a register as an optimization,
1745 because prepare_call_address already did it if it should be done. */
1746 if (GET_CODE (function) != SYMBOL_REF)
1747 function = memory_address (FUNCTION_MODE, function);
1749 /* Generate the actual call instruction and save the return value. */
1750 if (targetm.have_untyped_call ())
1752 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1753 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1754 result_vector (1, result));
1755 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1756 if (CALL_P (insn))
1757 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1758 emit_insn (seq);
1760 else if (targetm.have_call_value ())
1762 rtx valreg = 0;
1764 /* Locate the unique return register. It is not possible to
1765 express a call that sets more than one return register using
1766 call_value; use untyped_call for that. In fact, untyped_call
1767 only needs to save the return registers in the given block. */
1768 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1769 if ((mode = apply_result_mode[regno]) != VOIDmode)
1771 gcc_assert (!valreg); /* have_untyped_call required. */
1773 valreg = gen_rtx_REG (mode, regno);
1776 emit_insn (targetm.gen_call_value (valreg,
1777 gen_rtx_MEM (FUNCTION_MODE, function),
1778 const0_rtx, NULL_RTX, const0_rtx));
1780 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1782 else
1783 gcc_unreachable ();
1785 /* Find the CALL insn we just emitted, and attach the register usage
1786 information. */
1787 call_insn = last_call_insn ();
1788 add_function_usage_to (call_insn, call_fusage);
1790 /* Restore the stack. */
1791 if (targetm.have_save_stack_nonlocal ())
1792 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1793 else
1794 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1795 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1797 OK_DEFER_POP;
1799 /* Return the address of the result block. */
1800 result = copy_addr_to_reg (XEXP (result, 0));
1801 return convert_memory_address (ptr_mode, result);
1804 /* Perform an untyped return. */
1806 static void
1807 expand_builtin_return (rtx result)
1809 int size, align, regno;
1810 fixed_size_mode mode;
1811 rtx reg;
1812 rtx_insn *call_fusage = 0;
1814 result = convert_memory_address (Pmode, result);
1816 apply_result_size ();
1817 result = gen_rtx_MEM (BLKmode, result);
1819 if (targetm.have_untyped_return ())
1821 rtx vector = result_vector (0, result);
1822 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1823 emit_barrier ();
1824 return;
1827 /* Restore the return value and note that each value is used. */
1828 size = 0;
1829 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1830 if ((mode = apply_result_mode[regno]) != VOIDmode)
1832 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1833 if (size % align != 0)
1834 size = CEIL (size, align) * align;
1835 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1836 emit_move_insn (reg, adjust_address (result, mode, size));
1838 push_to_sequence (call_fusage);
1839 emit_use (reg);
1840 call_fusage = get_insns ();
1841 end_sequence ();
1842 size += GET_MODE_SIZE (mode);
1845 /* Put the USE insns before the return. */
1846 emit_insn (call_fusage);
1848 /* Return whatever value was restored by jumping directly to the end
1849 of the function. */
1850 expand_naked_return ();
1853 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1855 static enum type_class
1856 type_to_class (tree type)
1858 switch (TREE_CODE (type))
1860 case VOID_TYPE: return void_type_class;
1861 case INTEGER_TYPE: return integer_type_class;
1862 case ENUMERAL_TYPE: return enumeral_type_class;
1863 case BOOLEAN_TYPE: return boolean_type_class;
1864 case POINTER_TYPE: return pointer_type_class;
1865 case REFERENCE_TYPE: return reference_type_class;
1866 case OFFSET_TYPE: return offset_type_class;
1867 case REAL_TYPE: return real_type_class;
1868 case COMPLEX_TYPE: return complex_type_class;
1869 case FUNCTION_TYPE: return function_type_class;
1870 case METHOD_TYPE: return method_type_class;
1871 case RECORD_TYPE: return record_type_class;
1872 case UNION_TYPE:
1873 case QUAL_UNION_TYPE: return union_type_class;
1874 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1875 ? string_type_class : array_type_class);
1876 case LANG_TYPE: return lang_type_class;
1877 case OPAQUE_TYPE: return opaque_type_class;
1878 default: return no_type_class;
1882 /* Expand a call EXP to __builtin_classify_type. */
1884 static rtx
1885 expand_builtin_classify_type (tree exp)
1887 if (call_expr_nargs (exp))
1888 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1889 return GEN_INT (no_type_class);
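/* For example, __builtin_classify_type (1) evaluates to integer_type_class
   and __builtin_classify_type (1.0) to real_type_class; only the static type
   of the argument is inspected.  */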
1892 /* This helper macro, meant to be used in mathfn_built_in below, determines
1893 which among a set of builtin math functions is appropriate for a given type
1894 mode. The `F' (float) and `L' (long double) are automatically generated
1895 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1896 types, there are additional types that are considered with 'F32', 'F64',
1897 'F128', etc. suffixes. */
1898 #define CASE_MATHFN(MATHFN) \
1899 CASE_CFN_##MATHFN: \
1900 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1901 fcodel = BUILT_IN_##MATHFN##L ; break;
1902 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1903 types. */
1904 #define CASE_MATHFN_FLOATN(MATHFN) \
1905 CASE_CFN_##MATHFN: \
1906 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1907 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1908 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1909 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1910 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1911 break;
1912 /* Similar to above, but appends _R after any F/L suffix. */
1913 #define CASE_MATHFN_REENT(MATHFN) \
1914 case CFN_BUILT_IN_##MATHFN##_R: \
1915 case CFN_BUILT_IN_##MATHFN##F_R: \
1916 case CFN_BUILT_IN_##MATHFN##L_R: \
1917 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1918 fcodel = BUILT_IN_##MATHFN##L_R ; break;
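/* As an illustration, CASE_MATHFN (EXP10) above expands (roughly) to

     CASE_CFN_EXP10:
       fcode = BUILT_IN_EXP10; fcodef = BUILT_IN_EXP10F;
       fcodel = BUILT_IN_EXP10L; break;

   so each use of these macros in the switch below fills in the per-type
   function codes for one math function.  */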
1920 /* Return a function equivalent to FN but operating on floating-point
1921 values of type TYPE, or END_BUILTINS if no such function exists.
1922 This is purely an operation on function codes; it does not guarantee
1923 that the target actually has an implementation of the function. */
1925 static built_in_function
1926 mathfn_built_in_2 (tree type, combined_fn fn)
1928 tree mtype;
1929 built_in_function fcode, fcodef, fcodel;
1930 built_in_function fcodef16 = END_BUILTINS;
1931 built_in_function fcodef32 = END_BUILTINS;
1932 built_in_function fcodef64 = END_BUILTINS;
1933 built_in_function fcodef128 = END_BUILTINS;
1934 built_in_function fcodef32x = END_BUILTINS;
1935 built_in_function fcodef64x = END_BUILTINS;
1936 built_in_function fcodef128x = END_BUILTINS;
1938 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1939 break the uses below. */
1940 #undef HUGE_VAL
1941 #undef NAN
1943 switch (fn)
1945 #define SEQ_OF_CASE_MATHFN \
1946 CASE_MATHFN_FLOATN (ACOS) \
1947 CASE_MATHFN_FLOATN (ACOSH) \
1948 CASE_MATHFN_FLOATN (ASIN) \
1949 CASE_MATHFN_FLOATN (ASINH) \
1950 CASE_MATHFN_FLOATN (ATAN) \
1951 CASE_MATHFN_FLOATN (ATAN2) \
1952 CASE_MATHFN_FLOATN (ATANH) \
1953 CASE_MATHFN_FLOATN (CBRT) \
1954 CASE_MATHFN_FLOATN (CEIL) \
1955 CASE_MATHFN (CEXPI) \
1956 CASE_MATHFN_FLOATN (COPYSIGN) \
1957 CASE_MATHFN_FLOATN (COS) \
1958 CASE_MATHFN_FLOATN (COSH) \
1959 CASE_MATHFN (DREM) \
1960 CASE_MATHFN_FLOATN (ERF) \
1961 CASE_MATHFN_FLOATN (ERFC) \
1962 CASE_MATHFN_FLOATN (EXP) \
1963 CASE_MATHFN (EXP10) \
1964 CASE_MATHFN_FLOATN (EXP2) \
1965 CASE_MATHFN_FLOATN (EXPM1) \
1966 CASE_MATHFN_FLOATN (FABS) \
1967 CASE_MATHFN_FLOATN (FDIM) \
1968 CASE_MATHFN_FLOATN (FLOOR) \
1969 CASE_MATHFN_FLOATN (FMA) \
1970 CASE_MATHFN_FLOATN (FMAX) \
1971 CASE_MATHFN_FLOATN (FMIN) \
1972 CASE_MATHFN_FLOATN (FMOD) \
1973 CASE_MATHFN_FLOATN (FREXP) \
1974 CASE_MATHFN (GAMMA) \
1975 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1976 CASE_MATHFN_FLOATN (HUGE_VAL) \
1977 CASE_MATHFN_FLOATN (HYPOT) \
1978 CASE_MATHFN_FLOATN (ILOGB) \
1979 CASE_MATHFN (ICEIL) \
1980 CASE_MATHFN (IFLOOR) \
1981 CASE_MATHFN_FLOATN (INF) \
1982 CASE_MATHFN (IRINT) \
1983 CASE_MATHFN (IROUND) \
1984 CASE_MATHFN (ISINF) \
1985 CASE_MATHFN (J0) \
1986 CASE_MATHFN (J1) \
1987 CASE_MATHFN (JN) \
1988 CASE_MATHFN (LCEIL) \
1989 CASE_MATHFN_FLOATN (LDEXP) \
1990 CASE_MATHFN (LFLOOR) \
1991 CASE_MATHFN_FLOATN (LGAMMA) \
1992 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1993 CASE_MATHFN (LLCEIL) \
1994 CASE_MATHFN (LLFLOOR) \
1995 CASE_MATHFN_FLOATN (LLRINT) \
1996 CASE_MATHFN_FLOATN (LLROUND) \
1997 CASE_MATHFN_FLOATN (LOG) \
1998 CASE_MATHFN_FLOATN (LOG10) \
1999 CASE_MATHFN_FLOATN (LOG1P) \
2000 CASE_MATHFN_FLOATN (LOG2) \
2001 CASE_MATHFN_FLOATN (LOGB) \
2002 CASE_MATHFN_FLOATN (LRINT) \
2003 CASE_MATHFN_FLOATN (LROUND) \
2004 CASE_MATHFN_FLOATN (MODF) \
2005 CASE_MATHFN_FLOATN (NAN) \
2006 CASE_MATHFN_FLOATN (NANS) \
2007 CASE_MATHFN_FLOATN (NEARBYINT) \
2008 CASE_MATHFN_FLOATN (NEXTAFTER) \
2009 CASE_MATHFN (NEXTTOWARD) \
2010 CASE_MATHFN_FLOATN (POW) \
2011 CASE_MATHFN (POWI) \
2012 CASE_MATHFN (POW10) \
2013 CASE_MATHFN_FLOATN (REMAINDER) \
2014 CASE_MATHFN_FLOATN (REMQUO) \
2015 CASE_MATHFN_FLOATN (RINT) \
2016 CASE_MATHFN_FLOATN (ROUND) \
2017 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2018 CASE_MATHFN (SCALB) \
2019 CASE_MATHFN_FLOATN (SCALBLN) \
2020 CASE_MATHFN_FLOATN (SCALBN) \
2021 CASE_MATHFN (SIGNBIT) \
2022 CASE_MATHFN (SIGNIFICAND) \
2023 CASE_MATHFN_FLOATN (SIN) \
2024 CASE_MATHFN (SINCOS) \
2025 CASE_MATHFN_FLOATN (SINH) \
2026 CASE_MATHFN_FLOATN (SQRT) \
2027 CASE_MATHFN_FLOATN (TAN) \
2028 CASE_MATHFN_FLOATN (TANH) \
2029 CASE_MATHFN_FLOATN (TGAMMA) \
2030 CASE_MATHFN_FLOATN (TRUNC) \
2031 CASE_MATHFN (Y0) \
2032 CASE_MATHFN (Y1) \
2033 CASE_MATHFN (YN)
2035 SEQ_OF_CASE_MATHFN
2037 default:
2038 return END_BUILTINS;
2041 mtype = TYPE_MAIN_VARIANT (type);
2042 if (mtype == double_type_node)
2043 return fcode;
2044 else if (mtype == float_type_node)
2045 return fcodef;
2046 else if (mtype == long_double_type_node)
2047 return fcodel;
2048 else if (mtype == float16_type_node)
2049 return fcodef16;
2050 else if (mtype == float32_type_node)
2051 return fcodef32;
2052 else if (mtype == float64_type_node)
2053 return fcodef64;
2054 else if (mtype == float128_type_node)
2055 return fcodef128;
2056 else if (mtype == float32x_type_node)
2057 return fcodef32x;
2058 else if (mtype == float64x_type_node)
2059 return fcodef64x;
2060 else if (mtype == float128x_type_node)
2061 return fcodef128x;
2062 else
2063 return END_BUILTINS;
2066 #undef CASE_MATHFN
2067 #undef CASE_MATHFN_FLOATN
2068 #undef CASE_MATHFN_REENT
2070 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2071 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2072 otherwise use the explicit declaration. If we can't do the conversion,
2073 return null. */
2075 static tree
2076 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2078 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2079 if (fcode2 == END_BUILTINS)
2080 return NULL_TREE;
2082 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2083 return NULL_TREE;
2085 return builtin_decl_explicit (fcode2);
2088 /* Like mathfn_built_in_1, but always use the implicit array. */
2090 tree
2091 mathfn_built_in (tree type, combined_fn fn)
2093 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2096 /* Like mathfn_built_in_1, but always use the explicit array. */
2098 tree
2099 mathfn_built_in_explicit (tree type, combined_fn fn)
2101 return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2104 /* Like mathfn_built_in_1, but take a built_in_function and
2105 always use the implicit array. */
2107 tree
2108 mathfn_built_in (tree type, enum built_in_function fn)
2110 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
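/* As a usage sketch, a caller wanting the float variant of sqrt can do

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the decl for BUILT_IN_SQRTF when the implicit declaration is
   available, and NULL_TREE otherwise.  */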
2113 /* Return the type associated with a built-in function, i.e., the one
2114 to be passed to mathfn_built_in to get the type-specific
2115 function. */
2117 tree
2118 mathfn_built_in_type (combined_fn fn)
2120 #define CASE_MATHFN(MATHFN) \
2121 case CFN_BUILT_IN_##MATHFN: \
2122 return double_type_node; \
2123 case CFN_BUILT_IN_##MATHFN##F: \
2124 return float_type_node; \
2125 case CFN_BUILT_IN_##MATHFN##L: \
2126 return long_double_type_node;
2128 #define CASE_MATHFN_FLOATN(MATHFN) \
2129 CASE_MATHFN(MATHFN) \
2130 case CFN_BUILT_IN_##MATHFN##F16: \
2131 return float16_type_node; \
2132 case CFN_BUILT_IN_##MATHFN##F32: \
2133 return float32_type_node; \
2134 case CFN_BUILT_IN_##MATHFN##F64: \
2135 return float64_type_node; \
2136 case CFN_BUILT_IN_##MATHFN##F128: \
2137 return float128_type_node; \
2138 case CFN_BUILT_IN_##MATHFN##F32X: \
2139 return float32x_type_node; \
2140 case CFN_BUILT_IN_##MATHFN##F64X: \
2141 return float64x_type_node; \
2142 case CFN_BUILT_IN_##MATHFN##F128X: \
2143 return float128x_type_node;
2145 /* Similar to above, but appends _R after any F/L suffix. */
2146 #define CASE_MATHFN_REENT(MATHFN) \
2147 case CFN_BUILT_IN_##MATHFN##_R: \
2148 return double_type_node; \
2149 case CFN_BUILT_IN_##MATHFN##F_R: \
2150 return float_type_node; \
2151 case CFN_BUILT_IN_##MATHFN##L_R: \
2152 return long_double_type_node;
2154 switch (fn)
2156 SEQ_OF_CASE_MATHFN
2158 default:
2159 return NULL_TREE;
2162 #undef CASE_MATHFN
2163 #undef CASE_MATHFN_FLOATN
2164 #undef CASE_MATHFN_REENT
2165 #undef SEQ_OF_CASE_MATHFN
2168 /* Check whether there is an internal function associated with function FN
2169 and return type RETURN_TYPE. Return the function if so, otherwise return
2170 IFN_LAST.
2172 Note that this function only tests whether the function is defined in
2173 internal-fn.def, not whether it is actually available on the target. */
2175 static internal_fn
2176 associated_internal_fn (built_in_function fn, tree return_type)
2178 switch (fn)
2180 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2181 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2182 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2183 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2184 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2185 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2186 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2187 #include "internal-fn.def"
2189 CASE_FLT_FN (BUILT_IN_POW10):
2190 return IFN_EXP10;
2192 CASE_FLT_FN (BUILT_IN_DREM):
2193 return IFN_REMAINDER;
2195 CASE_FLT_FN (BUILT_IN_SCALBN):
2196 CASE_FLT_FN (BUILT_IN_SCALBLN):
2197 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2198 return IFN_LDEXP;
2199 return IFN_LAST;
2201 default:
2202 return IFN_LAST;
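/* For instance, with the mappings above

     associated_internal_fn (BUILT_IN_SQRTF, float_type_node)

   returns IFN_SQRT, and BUILT_IN_POW10 maps to IFN_EXP10; whether the target
   can actually expand the internal function is checked separately (see
   replacement_internal_fn below).  */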
2206 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2207 return its code, otherwise return IFN_LAST. Note that this function
2208 only tests whether the function is defined in internal-fn.def, not whether
2209 it is actually available on the target. */
2211 internal_fn
2212 associated_internal_fn (tree fndecl)
2214 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2215 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2216 TREE_TYPE (TREE_TYPE (fndecl)));
2219 /* Check whether there is an internal function associated with function CFN
2220 and return type RETURN_TYPE. Return the function if so, otherwise return
2221 IFN_LAST.
2223 Note that this function only tests whether the function is defined in
2224 internal-fn.def, not whether it is actually available on the target. */
2226 internal_fn
2227 associated_internal_fn (combined_fn cfn, tree return_type)
2229 if (internal_fn_p (cfn))
2230 return as_internal_fn (cfn);
2231 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2234 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2235 on the current target by a call to an internal function, return the
2236 code of that internal function, otherwise return IFN_LAST. The caller
2237 is responsible for ensuring that any side-effects of the built-in
2238 call are dealt with correctly. E.g. if CALL sets errno, the caller
2239 must decide that the errno result isn't needed or make it available
2240 in some other way. */
2242 internal_fn
2243 replacement_internal_fn (gcall *call)
2245 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2247 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2248 if (ifn != IFN_LAST)
2250 tree_pair types = direct_internal_fn_types (ifn, call);
2251 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2252 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2253 return ifn;
2256 return IFN_LAST;
2259 /* Expand a call to the builtin ternary math functions (fma).
2260 Return NULL_RTX if a normal call should be emitted rather than expanding the
2261 function in-line. EXP is the expression that is a call to the builtin
2262 function; if convenient, the result should be placed in TARGET.
2263 SUBTARGET may be used as the target for computing one of EXP's
2264 operands. */
2266 static rtx
2267 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2269 optab builtin_optab;
2270 rtx op0, op1, op2, result;
2271 rtx_insn *insns;
2272 tree fndecl = get_callee_fndecl (exp);
2273 tree arg0, arg1, arg2;
2274 machine_mode mode;
2276 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2277 return NULL_RTX;
2279 arg0 = CALL_EXPR_ARG (exp, 0);
2280 arg1 = CALL_EXPR_ARG (exp, 1);
2281 arg2 = CALL_EXPR_ARG (exp, 2);
2283 switch (DECL_FUNCTION_CODE (fndecl))
2285 CASE_FLT_FN (BUILT_IN_FMA):
2286 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2287 builtin_optab = fma_optab; break;
2288 default:
2289 gcc_unreachable ();
2292 /* Make a suitable register to place result in. */
2293 mode = TYPE_MODE (TREE_TYPE (exp));
2295 /* Before working hard, check whether the instruction is available. */
2296 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2297 return NULL_RTX;
2299 result = gen_reg_rtx (mode);
2301 /* Always stabilize the argument list. */
2302 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2303 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2304 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2306 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2307 op1 = expand_normal (arg1);
2308 op2 = expand_normal (arg2);
2310 start_sequence ();
2312 /* Compute into RESULT.
2313 Set RESULT to wherever the result comes back. */
2314 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2315 result, 0);
2317 /* If we were unable to expand via the builtin, stop the sequence
2318 (without outputting the insns) and call the library function
2319 with the stabilized argument list. */
2320 if (result == 0)
2322 end_sequence ();
2323 return expand_call (exp, target, target == const0_rtx);
2326 /* Output the entire sequence. */
2327 insns = get_insns ();
2328 end_sequence ();
2329 emit_insn (insns);
2331 return result;
2334 /* Expand a call to the builtin sin and cos math functions.
2335 Return NULL_RTX if a normal call should be emitted rather than expanding the
2336 function in-line. EXP is the expression that is a call to the builtin
2337 function; if convenient, the result should be placed in TARGET.
2338 SUBTARGET may be used as the target for computing one of EXP's
2339 operands. */
2341 static rtx
2342 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2344 optab builtin_optab;
2345 rtx op0;
2346 rtx_insn *insns;
2347 tree fndecl = get_callee_fndecl (exp);
2348 machine_mode mode;
2349 tree arg;
2351 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2352 return NULL_RTX;
2354 arg = CALL_EXPR_ARG (exp, 0);
2356 switch (DECL_FUNCTION_CODE (fndecl))
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 CASE_FLT_FN (BUILT_IN_COS):
2360 builtin_optab = sincos_optab; break;
2361 default:
2362 gcc_unreachable ();
2365 /* Make a suitable register to place result in. */
2366 mode = TYPE_MODE (TREE_TYPE (exp));
2368 /* Check if the sincos insn is available; otherwise fall back
2369 to the sin or cos insn. */
2370 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2371 switch (DECL_FUNCTION_CODE (fndecl))
2373 CASE_FLT_FN (BUILT_IN_SIN):
2374 builtin_optab = sin_optab; break;
2375 CASE_FLT_FN (BUILT_IN_COS):
2376 builtin_optab = cos_optab; break;
2377 default:
2378 gcc_unreachable ();
2381 /* Before working hard, check whether the instruction is available. */
2382 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2384 rtx result = gen_reg_rtx (mode);
2386 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2387 need to expand the argument again. This way, we will not perform
2388 side-effects more than once. */
2389 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2391 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2393 start_sequence ();
2395 /* Compute into RESULT.
2396 Set RESULT to wherever the result comes back. */
2397 if (builtin_optab == sincos_optab)
2399 int ok;
2401 switch (DECL_FUNCTION_CODE (fndecl))
2403 CASE_FLT_FN (BUILT_IN_SIN):
2404 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2405 break;
2406 CASE_FLT_FN (BUILT_IN_COS):
2407 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2408 break;
2409 default:
2410 gcc_unreachable ();
2412 gcc_assert (ok);
2414 else
2415 result = expand_unop (mode, builtin_optab, op0, result, 0);
2417 if (result != 0)
2419 /* Output the entire sequence. */
2420 insns = get_insns ();
2421 end_sequence ();
2422 emit_insn (insns);
2423 return result;
2426 /* If we were unable to expand via the builtin, stop the sequence
2427 (without outputting the insns) and call the library function
2428 with the stabilized argument list. */
2429 end_sequence ();
2432 return expand_call (exp, target, target == const0_rtx);
2435 /* Given an interclass math builtin decl FNDECL and its argument ARG
2436 return an RTL instruction code that implements the functionality.
2437 If that isn't possible or available return CODE_FOR_nothing. */
2439 static enum insn_code
2440 interclass_mathfn_icode (tree arg, tree fndecl)
2442 bool errno_set = false;
2443 optab builtin_optab = unknown_optab;
2444 machine_mode mode;
2446 switch (DECL_FUNCTION_CODE (fndecl))
2448 CASE_FLT_FN (BUILT_IN_ILOGB):
2449 errno_set = true; builtin_optab = ilogb_optab; break;
2450 CASE_FLT_FN (BUILT_IN_ISINF):
2451 builtin_optab = isinf_optab; break;
2452 case BUILT_IN_ISNORMAL:
2453 case BUILT_IN_ISFINITE:
2454 CASE_FLT_FN (BUILT_IN_FINITE):
2455 case BUILT_IN_FINITED32:
2456 case BUILT_IN_FINITED64:
2457 case BUILT_IN_FINITED128:
2458 case BUILT_IN_ISINFD32:
2459 case BUILT_IN_ISINFD64:
2460 case BUILT_IN_ISINFD128:
2461 /* These builtins have no optabs (yet). */
2462 break;
2463 default:
2464 gcc_unreachable ();
2467 /* There's no easy way to detect the case we need to set EDOM. */
2468 if (flag_errno_math && errno_set)
2469 return CODE_FOR_nothing;
2471 /* Optab mode depends on the mode of the input argument. */
2472 mode = TYPE_MODE (TREE_TYPE (arg));
2474 if (builtin_optab)
2475 return optab_handler (builtin_optab, mode);
2476 return CODE_FOR_nothing;
2479 /* Expand a call to one of the builtin math functions that operate on
2480 a floating point argument and output an integer result (ilogb, isinf,
2481 isnan, etc).
2482 Return 0 if a normal call should be emitted rather than expanding the
2483 function in-line. EXP is the expression that is a call to the builtin
2484 function; if convenient, the result should be placed in TARGET. */
2486 static rtx
2487 expand_builtin_interclass_mathfn (tree exp, rtx target)
2489 enum insn_code icode = CODE_FOR_nothing;
2490 rtx op0;
2491 tree fndecl = get_callee_fndecl (exp);
2492 machine_mode mode;
2493 tree arg;
2495 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2496 return NULL_RTX;
2498 arg = CALL_EXPR_ARG (exp, 0);
2499 icode = interclass_mathfn_icode (arg, fndecl);
2500 mode = TYPE_MODE (TREE_TYPE (arg));
2502 if (icode != CODE_FOR_nothing)
2504 class expand_operand ops[1];
2505 rtx_insn *last = get_last_insn ();
2506 tree orig_arg = arg;
2508 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2509 need to expand the argument again. This way, we will not perform
2510 side-effects more than once. */
2511 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2513 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2515 if (mode != GET_MODE (op0))
2516 op0 = convert_to_mode (mode, op0, 0);
2518 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2519 if (maybe_legitimize_operands (icode, 0, 1, ops)
2520 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2521 return ops[0].value;
2523 delete_insns_since (last);
2524 CALL_EXPR_ARG (exp, 0) = orig_arg;
2527 return NULL_RTX;
2530 /* Expand a call to the builtin sincos math function.
2531 Return NULL_RTX if a normal call should be emitted rather than expanding the
2532 function in-line. EXP is the expression that is a call to the builtin
2533 function. */
2535 static rtx
2536 expand_builtin_sincos (tree exp)
2538 rtx op0, op1, op2, target1, target2;
2539 machine_mode mode;
2540 tree arg, sinp, cosp;
2541 int result;
2542 location_t loc = EXPR_LOCATION (exp);
2543 tree alias_type, alias_off;
2545 if (!validate_arglist (exp, REAL_TYPE,
2546 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2547 return NULL_RTX;
2549 arg = CALL_EXPR_ARG (exp, 0);
2550 sinp = CALL_EXPR_ARG (exp, 1);
2551 cosp = CALL_EXPR_ARG (exp, 2);
2553 /* Make a suitable register to place result in. */
2554 mode = TYPE_MODE (TREE_TYPE (arg));
2556 /* Check if sincos insn is available, otherwise emit the call. */
2557 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2558 return NULL_RTX;
2560 target1 = gen_reg_rtx (mode);
2561 target2 = gen_reg_rtx (mode);
2563 op0 = expand_normal (arg);
2564 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2565 alias_off = build_int_cst (alias_type, 0);
2566 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2567 sinp, alias_off));
2568 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2569 cosp, alias_off));
2571 /* Compute into target1 and target2.
2572 Set TARGET to wherever the result comes back. */
2573 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2574 gcc_assert (result);
2576 /* Move target1 and target2 to the memory locations indicated
2577 by op1 and op2. */
2578 emit_move_insn (op1, target1);
2579 emit_move_insn (op2, target2);
2581 return const0_rtx;
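/* So a call such as sincos (x, &s, &c) is expanded inline only when the
   target provides a sincos instruction for the argument's mode; both results
   are then stored through the user-supplied pointers, and otherwise the
   ordinary library call is emitted.  */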
2584 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2585 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2586 static rtx
2587 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2589 if (!validate_arglist (exp, VOID_TYPE))
2590 return NULL_RTX;
2592 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2593 if (icode == CODE_FOR_nothing)
2594 return NULL_RTX;
2596 if (target == 0
2597 || GET_MODE (target) != target_mode
2598 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2599 target = gen_reg_rtx (target_mode);
2601 rtx pat = GEN_FCN (icode) (target);
2602 if (!pat)
2603 return NULL_RTX;
2604 emit_insn (pat);
2606 return target;
2609 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2610 fenv.h), returning the result and setting it in TARGET. Otherwise return
2611 NULL_RTX on failure. */
2612 static rtx
2613 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2614 machine_mode target_mode, optab op_optab)
2616 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2617 return NULL_RTX;
2618 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2620 insn_code icode = direct_optab_handler (op_optab, SImode);
2621 if (icode == CODE_FOR_nothing)
2622 return NULL_RTX;
2624 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2625 return NULL_RTX;
2627 if (target == 0
2628 || GET_MODE (target) != target_mode
2629 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2630 target = gen_reg_rtx (target_mode);
2632 rtx pat = GEN_FCN (icode) (target, op0);
2633 if (!pat)
2634 return NULL_RTX;
2635 emit_insn (pat);
2637 return target;
2640 /* Expand a call to the internal cexpi builtin to the sincos math function.
2641 EXP is the expression that is a call to the builtin function; if convenient,
2642 the result should be placed in TARGET. */
2644 static rtx
2645 expand_builtin_cexpi (tree exp, rtx target)
2647 tree fndecl = get_callee_fndecl (exp);
2648 tree arg, type;
2649 machine_mode mode;
2650 rtx op0, op1, op2;
2651 location_t loc = EXPR_LOCATION (exp);
2653 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2654 return NULL_RTX;
2656 arg = CALL_EXPR_ARG (exp, 0);
2657 type = TREE_TYPE (arg);
2658 mode = TYPE_MODE (TREE_TYPE (arg));
2660 /* Try expanding via a sincos optab, fall back to emitting a libcall
2661 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2662 is only generated from sincos, cexp or if we have either of them. */
2663 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2665 op1 = gen_reg_rtx (mode);
2666 op2 = gen_reg_rtx (mode);
2668 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2670 /* Compute into op1 and op2. */
2671 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2673 else if (targetm.libc_has_function (function_sincos, type))
2675 tree call, fn = NULL_TREE;
2676 tree top1, top2;
2677 rtx op1a, op2a;
2679 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2680 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2681 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2682 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2683 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2684 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2685 else
2686 gcc_unreachable ();
2688 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2689 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2690 op1a = copy_addr_to_reg (XEXP (op1, 0));
2691 op2a = copy_addr_to_reg (XEXP (op2, 0));
2692 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2693 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2695 /* Make sure not to fold the sincos call again. */
2696 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2697 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2698 call, 3, arg, top1, top2));
2700 else
2702 tree call, fn = NULL_TREE, narg;
2703 tree ctype = build_complex_type (type);
2705 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2706 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2707 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2708 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2709 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2710 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2711 else
2712 gcc_unreachable ();
2714 /* If we don't have a decl for cexp create one. This is the
2715 friendliest fallback if the user calls __builtin_cexpi
2716 without full target C99 function support. */
2717 if (fn == NULL_TREE)
2719 tree fntype;
2720 const char *name = NULL;
2722 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2723 name = "cexpf";
2724 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2725 name = "cexp";
2726 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2727 name = "cexpl";
2729 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2730 fn = build_fn_decl (name, fntype);
2733 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2734 build_real (type, dconst0), arg);
2736 /* Make sure not to fold the cexp call again. */
2737 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2738 return expand_expr (build_call_nary (ctype, call, 1, narg),
2739 target, VOIDmode, EXPAND_NORMAL);
2742 /* Now build the proper return type. */
2743 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2744 make_tree (TREE_TYPE (arg), op2),
2745 make_tree (TREE_TYPE (arg), op1)),
2746 target, VOIDmode, EXPAND_NORMAL);
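/* In short, __builtin_cexpi (x) becomes either a sincos insn, a call to
   sincos (x, &s, &c) followed by constructing the complex value c + s*i,
   or a call to cexp (x*i), depending on what the target provides.  */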
2749 /* Conveniently construct a function call expression. FNDECL names the
2750 function to be called, N is the number of arguments, and the "..."
2751 parameters are the argument expressions. Unlike build_call_expr
2752 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2754 static tree
2755 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2757 va_list ap;
2758 tree fntype = TREE_TYPE (fndecl);
2759 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2761 va_start (ap, n);
2762 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2763 va_end (ap);
2764 SET_EXPR_LOCATION (fn, loc);
2765 return fn;
2768 /* Expand the __builtin_issignaling builtin. This needs to handle
2769 all floating point formats that do support NaNs (for those that
2770 don't it just sets target to 0). */
2772 static rtx
2773 expand_builtin_issignaling (tree exp, rtx target)
2775 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2776 return NULL_RTX;
2778 tree arg = CALL_EXPR_ARG (exp, 0);
2779 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2780 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2782 /* Expand the argument yielding an RTX expression. */
2783 rtx temp = expand_normal (arg);
2785 /* If mode doesn't support NaN, always return 0.
2786 Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2787 __builtin_issignaling working without -fsignaling-nans. Especially
2788 when -fno-signaling-nans is the default.
2789 On the other hand, MODE_HAS_NANS (fmode) is unnecessary: with
2790 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2791 fold to 0 or non-NaN/Inf classification. */
2792 if (!HONOR_NANS (fmode))
2794 emit_move_insn (target, const0_rtx);
2795 return target;
2798 /* Check if the back end provides an insn that handles issignaling for the
2799 argument's mode. */
2800 enum insn_code icode = optab_handler (issignaling_optab, fmode);
2801 if (icode != CODE_FOR_nothing)
2803 rtx_insn *last = get_last_insn ();
2804 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2805 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2806 return this_target;
2807 delete_insns_since (last);
2810 if (DECIMAL_FLOAT_MODE_P (fmode))
2812 scalar_int_mode imode;
2813 rtx hi;
2814 switch (fmt->ieee_bits)
2816 case 32:
2817 case 64:
2818 imode = int_mode_for_mode (fmode).require ();
2819 temp = gen_lowpart (imode, temp);
2820 break;
2821 case 128:
2822 imode = int_mode_for_size (64, 1).require ();
2823 hi = NULL_RTX;
2824 /* For decimal128, TImode support isn't always there and even when
2825 it is, working on the DImode high part is usually better. */
2826 if (!MEM_P (temp))
2828 if (rtx t = simplify_gen_subreg (imode, temp, fmode,
2829 subreg_highpart_offset (imode,
2830 fmode)))
2831 hi = t;
2832 else
2834 scalar_int_mode imode2;
2835 if (int_mode_for_mode (fmode).exists (&imode2))
2837 rtx temp2 = gen_lowpart (imode2, temp);
2838 poly_uint64 off = subreg_highpart_offset (imode, imode2);
2839 if (rtx t = simplify_gen_subreg (imode, temp2,
2840 imode2, off))
2841 hi = t;
2844 if (!hi)
2846 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2847 emit_move_insn (mem, temp);
2848 temp = mem;
2851 if (!hi)
2853 poly_int64 offset
2854 = subreg_highpart_offset (imode, GET_MODE (temp));
2855 hi = adjust_address (temp, imode, offset);
2857 temp = hi;
2858 break;
2859 default:
2860 gcc_unreachable ();
2862 /* In all of decimal{32,64,128}, the MSB is the sign bit and sNaNs
2863 have the 6 bits below it all set. */
2864 rtx val
2865 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2866 temp = expand_binop (imode, and_optab, temp, val,
2867 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2868 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2869 return temp;
2872 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2873 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2874 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2875 gcc_assert (MODE_COMPOSITE_P (fmode)
2876 || (fmt->pnan == fmt->p
2877 && fmt->signbit_ro == fmt->signbit_rw));
2879 switch (fmt->p)
2881 case 106: /* IBM double double */
2882 /* For IBM double double, recurse on the most significant double. */
2883 gcc_assert (MODE_COMPOSITE_P (fmode));
2884 temp = convert_modes (DFmode, fmode, temp, 0);
2885 fmode = DFmode;
2886 fmt = REAL_MODE_FORMAT (DFmode);
2887 /* FALLTHRU */
2888 case 8: /* bfloat */
2889 case 11: /* IEEE half */
2890 case 24: /* IEEE single */
2891 case 53: /* IEEE double or Intel extended with rounding to double */
2892 if (fmt->p == 53 && fmt->signbit_ro == 79)
2893 goto extended;
2895 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2896 temp = gen_lowpart (imode, temp);
2897 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2898 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2899 if (fmt->qnan_msb_set)
2901 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2902 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2903 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2904 ((temp ^ bit) & mask) > val. */
2905 temp = expand_binop (imode, xor_optab, temp, bit,
2906 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2907 temp = expand_binop (imode, and_optab, temp, mask,
2908 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2909 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2910 1, 1);
2912 else
2914 /* For MIPS/PA IEEE single/double, expand to:
2915 (temp & val) == val. */
2916 temp = expand_binop (imode, and_optab, temp, val,
2917 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2918 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2919 1, 1);
2922 break;
2923 case 113: /* IEEE quad */
2925 rtx hi = NULL_RTX, lo = NULL_RTX;
2926 scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2927 /* For IEEE quad, TImode support isn't always there and even when
2928 it is, working on DImode parts is usually better. */
2929 if (!MEM_P (temp))
2931 hi = simplify_gen_subreg (imode, temp, fmode,
2932 subreg_highpart_offset (imode, fmode));
2933 lo = simplify_gen_subreg (imode, temp, fmode,
2934 subreg_lowpart_offset (imode, fmode));
2935 if (!hi || !lo)
2937 scalar_int_mode imode2;
2938 if (int_mode_for_mode (fmode).exists (&imode2))
2940 rtx temp2 = gen_lowpart (imode2, temp);
2941 hi = simplify_gen_subreg (imode, temp2, imode2,
2942 subreg_highpart_offset (imode,
2943 imode2));
2944 lo = simplify_gen_subreg (imode, temp2, imode2,
2945 subreg_lowpart_offset (imode,
2946 imode2));
2949 if (!hi || !lo)
2951 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2952 emit_move_insn (mem, temp);
2953 temp = mem;
2956 if (!hi || !lo)
2958 poly_int64 offset
2959 = subreg_highpart_offset (imode, GET_MODE (temp));
2960 hi = adjust_address (temp, imode, offset);
2961 offset = subreg_lowpart_offset (imode, GET_MODE (temp));
2962 lo = adjust_address (temp, imode, offset);
2964 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
2965 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
2966 if (fmt->qnan_msb_set)
2968 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
2969 - 64)));
2970 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
2971 /* For non-MIPS/PA IEEE quad, expand to:
2972 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
2973 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
2974 lo = expand_binop (imode, ior_optab, lo, nlo,
2975 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2976 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
2977 temp = expand_binop (imode, xor_optab, hi, bit,
2978 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2979 temp = expand_binop (imode, ior_optab, temp, lo,
2980 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2981 temp = expand_binop (imode, and_optab, temp, mask,
2982 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2983 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2984 1, 1);
2986 else
2988 /* For MIPS/PA IEEE quad, expand to:
2989 (hi & val) == val. */
2990 temp = expand_binop (imode, and_optab, hi, val,
2991 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2992 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2993 1, 1);
2996 break;
2997 case 64: /* Intel or Motorola extended */
2998 extended:
3000 rtx ex, hi, lo;
3001 scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3002 scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3003 if (!MEM_P (temp))
3005 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3006 emit_move_insn (mem, temp);
3007 temp = mem;
3009 if (fmt->signbit_ro == 95)
3011 /* Motorola, always big endian, with 16-bit gap in between
3012 16-bit sign+exponent and 64-bit mantissa. */
3013 ex = adjust_address (temp, iemode, 0);
3014 hi = adjust_address (temp, imode, 4);
3015 lo = adjust_address (temp, imode, 8);
3017 else if (!WORDS_BIG_ENDIAN)
3019 /* Intel little endian, 64-bit mantissa followed by 16-bit
3020 sign+exponent and then either 16 or 48 bits of gap. */
3021 ex = adjust_address (temp, iemode, 8);
3022 hi = adjust_address (temp, imode, 4);
3023 lo = adjust_address (temp, imode, 0);
3025 else
3027 /* Big endian Itanium. */
3028 ex = adjust_address (temp, iemode, 0);
3029 hi = adjust_address (temp, imode, 2);
3030 lo = adjust_address (temp, imode, 6);
3032 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3033 gcc_assert (fmt->qnan_msb_set);
3034 rtx mask = GEN_INT (0x7fff);
3035 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3036 /* For Intel/Motorola extended format, expand to:
3037 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3038 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3039 lo = expand_binop (imode, ior_optab, lo, nlo,
3040 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3041 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3042 temp = expand_binop (imode, xor_optab, hi, bit,
3043 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3044 temp = expand_binop (imode, ior_optab, temp, lo,
3045 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3046 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3047 ex = expand_binop (iemode, and_optab, ex, mask,
3048 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3049 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3050 ex, mask, iemode, 1, 1);
3051 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3052 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3054 break;
3055 default:
3056 gcc_unreachable ();
3059 return temp;
3062 /* Expand a call to one of the builtin rounding functions gcc defines
3063 as an extension (lfloor and lceil). As these are gcc extensions we
3064 do not need to worry about setting errno to EDOM.
3065 If expanding via optab fails, lower the expression to (int)(floor(x)).
3066 EXP is the expression that is a call to the builtin function;
3067 if convenient, the result should be placed in TARGET. */
3069 static rtx
3070 expand_builtin_int_roundingfn (tree exp, rtx target)
3072 convert_optab builtin_optab;
3073 rtx op0, tmp;
3074 rtx_insn *insns;
3075 tree fndecl = get_callee_fndecl (exp);
3076 enum built_in_function fallback_fn;
3077 tree fallback_fndecl;
3078 machine_mode mode;
3079 tree arg;
3081 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3082 return NULL_RTX;
3084 arg = CALL_EXPR_ARG (exp, 0);
3086 switch (DECL_FUNCTION_CODE (fndecl))
3088 CASE_FLT_FN (BUILT_IN_ICEIL):
3089 CASE_FLT_FN (BUILT_IN_LCEIL):
3090 CASE_FLT_FN (BUILT_IN_LLCEIL):
3091 builtin_optab = lceil_optab;
3092 fallback_fn = BUILT_IN_CEIL;
3093 break;
3095 CASE_FLT_FN (BUILT_IN_IFLOOR):
3096 CASE_FLT_FN (BUILT_IN_LFLOOR):
3097 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3098 builtin_optab = lfloor_optab;
3099 fallback_fn = BUILT_IN_FLOOR;
3100 break;
3102 default:
3103 gcc_unreachable ();
3106 /* Make a suitable register to place result in. */
3107 mode = TYPE_MODE (TREE_TYPE (exp));
3109 target = gen_reg_rtx (mode);
3111 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3112 need to expand the argument again. This way, we will not perform
3113 side-effects more than once. */
3114 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3116 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3118 start_sequence ();
3120 /* Compute into TARGET. */
3121 if (expand_sfix_optab (target, op0, builtin_optab))
3123 /* Output the entire sequence. */
3124 insns = get_insns ();
3125 end_sequence ();
3126 emit_insn (insns);
3127 return target;
3130 /* If we were unable to expand via the builtin, stop the sequence
3131 (without outputting the insns). */
3132 end_sequence ();
3134 /* Fall back to floating point rounding optab. */
3135 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3137 /* For non-C99 targets we may end up without a fallback fndecl here
3138 if the user called __builtin_lfloor directly. In this case emit
3139 a call to the floor/ceil variants nevertheless. This should result
3140 in the best user experience for targets without full C99 support. */
3141 if (fallback_fndecl == NULL_TREE)
3143 tree fntype;
3144 const char *name = NULL;
3146 switch (DECL_FUNCTION_CODE (fndecl))
3148 case BUILT_IN_ICEIL:
3149 case BUILT_IN_LCEIL:
3150 case BUILT_IN_LLCEIL:
3151 name = "ceil";
3152 break;
3153 case BUILT_IN_ICEILF:
3154 case BUILT_IN_LCEILF:
3155 case BUILT_IN_LLCEILF:
3156 name = "ceilf";
3157 break;
3158 case BUILT_IN_ICEILL:
3159 case BUILT_IN_LCEILL:
3160 case BUILT_IN_LLCEILL:
3161 name = "ceill";
3162 break;
3163 case BUILT_IN_IFLOOR:
3164 case BUILT_IN_LFLOOR:
3165 case BUILT_IN_LLFLOOR:
3166 name = "floor";
3167 break;
3168 case BUILT_IN_IFLOORF:
3169 case BUILT_IN_LFLOORF:
3170 case BUILT_IN_LLFLOORF:
3171 name = "floorf";
3172 break;
3173 case BUILT_IN_IFLOORL:
3174 case BUILT_IN_LFLOORL:
3175 case BUILT_IN_LLFLOORL:
3176 name = "floorl";
3177 break;
3178 default:
3179 gcc_unreachable ();
3182 fntype = build_function_type_list (TREE_TYPE (arg),
3183 TREE_TYPE (arg), NULL_TREE);
3184 fallback_fndecl = build_fn_decl (name, fntype);
3187 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3189 tmp = expand_normal (exp);
3190 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3192 /* Truncate the result of floating point optab to integer
3193 via expand_fix (). */
3194 target = gen_reg_rtx (mode);
3195 expand_fix (target, tmp, 0);
3197 return target;
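/* E.g. __builtin_lfloor (x) is expanded through the lfloor optab when the
   target has one; otherwise it is lowered to a call to floor (or the
   appropriate type variant) followed by an ordinary float-to-integer
   conversion via expand_fix, as implemented above.  */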
3200 /* Expand a call to one of the builtin math functions doing integer
3201 conversion (lrint).
3202 Return 0 if a normal call should be emitted rather than expanding the
3203 function in-line. EXP is the expression that is a call to the builtin
3204 function; if convenient, the result should be placed in TARGET. */
3206 static rtx
3207 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3209 convert_optab builtin_optab;
3210 rtx op0;
3211 rtx_insn *insns;
3212 tree fndecl = get_callee_fndecl (exp);
3213 tree arg;
3214 machine_mode mode;
3215 enum built_in_function fallback_fn = BUILT_IN_NONE;
3217 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3218 return NULL_RTX;
3220 arg = CALL_EXPR_ARG (exp, 0);
3222 switch (DECL_FUNCTION_CODE (fndecl))
3224 CASE_FLT_FN (BUILT_IN_IRINT):
3225 fallback_fn = BUILT_IN_LRINT;
3226 gcc_fallthrough ();
3227 CASE_FLT_FN (BUILT_IN_LRINT):
3228 CASE_FLT_FN (BUILT_IN_LLRINT):
3229 builtin_optab = lrint_optab;
3230 break;
3232 CASE_FLT_FN (BUILT_IN_IROUND):
3233 fallback_fn = BUILT_IN_LROUND;
3234 gcc_fallthrough ();
3235 CASE_FLT_FN (BUILT_IN_LROUND):
3236 CASE_FLT_FN (BUILT_IN_LLROUND):
3237 builtin_optab = lround_optab;
3238 break;
3240 default:
3241 gcc_unreachable ();
3244 /* There's no easy way to detect the case we need to set EDOM. */
3245 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3246 return NULL_RTX;
3248 /* Make a suitable register to place result in. */
3249 mode = TYPE_MODE (TREE_TYPE (exp));
3251 /* There's no easy way to detect the case we need to set EDOM. */
3252 if (!flag_errno_math)
3254 rtx result = gen_reg_rtx (mode);
3256 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3257 need to expand the argument again. This way, we will not perform
3258 side-effects more than once. */
3259 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3261 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3263 start_sequence ();
3265 if (expand_sfix_optab (result, op0, builtin_optab))
3267 /* Output the entire sequence. */
3268 insns = get_insns ();
3269 end_sequence ();
3270 emit_insn (insns);
3271 return result;
3274 /* If we were unable to expand via the builtin, stop the sequence
3275 (without outputting the insns) and call the library function
3276 with the stabilized argument list. */
3277 end_sequence ();
3280 if (fallback_fn != BUILT_IN_NONE)
3282 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3283 targets, (int) round (x) should never be transformed into
3284 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3285 a call to lround in the hope that the target provides at least some
3286 C99 functions. This should result in the best user experience for
3287 targets without full C99 support.
3288 As scalar float conversions with the same mode are useless in GIMPLE,
3289 we can end up e.g. with a _Float32 argument passed to a float builtin,
3290 so try to get the type from the builtin prototype first. */
3291 tree fallback_fndecl = NULL_TREE;
3292 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3293 fallback_fndecl
3294 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3295 as_combined_fn (fallback_fn), 0);
3296 if (fallback_fndecl == NULL_TREE)
3297 fallback_fndecl
3298 = mathfn_built_in_1 (TREE_TYPE (arg),
3299 as_combined_fn (fallback_fn), 0);
3300 if (fallback_fndecl)
3302 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3303 fallback_fndecl, 1, arg);
3305 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3306 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3307 return convert_to_mode (mode, target, 0);
3311 return expand_call (exp, target, target == const0_rtx);
3314 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3315 a normal call should be emitted rather than expanding the function
3316 in-line. EXP is the expression that is a call to the builtin
3317 function; if convenient, the result should be placed in TARGET. */
3319 static rtx
3320 expand_builtin_powi (tree exp, rtx target)
3322 tree arg0, arg1;
3323 rtx op0, op1;
3324 machine_mode mode;
3325 machine_mode mode2;
3327 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3328 return NULL_RTX;
3330 arg0 = CALL_EXPR_ARG (exp, 0);
3331 arg1 = CALL_EXPR_ARG (exp, 1);
3332 mode = TYPE_MODE (TREE_TYPE (exp));
3334 /* Emit a libcall to libgcc. */
3336 /* Mode of the 2nd argument must match that of an int. */
3337 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3339 if (target == NULL_RTX)
3340 target = gen_reg_rtx (mode);
3342 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3343 if (GET_MODE (op0) != mode)
3344 op0 = convert_to_mode (mode, op0, 0);
3345 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3346 if (GET_MODE (op1) != mode2)
3347 op1 = convert_to_mode (mode2, op1, 0);
3349 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3350 target, LCT_CONST, mode,
3351 op0, mode, op1, mode2);
3353 return target;
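/* E.g. for a double argument this emits a call to the powi_optab libfunc
   (normally libgcc's __powidf2), passing the exponent in the mode of an
   int.  */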
3356 /* Expand expression EXP which is a call to the strlen builtin. Return
3357 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3358 try to get the result in TARGET, if convenient. */
3360 static rtx
3361 expand_builtin_strlen (tree exp, rtx target,
3362 machine_mode target_mode)
3364 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3365 return NULL_RTX;
3367 tree src = CALL_EXPR_ARG (exp, 0);
3369 /* If the length can be computed at compile-time, return it. */
3370 if (tree len = c_strlen (src, 0))
3371 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3373 /* If the length can be computed at compile-time and is constant
3374 integer, but there are side-effects in src, evaluate
3375 src for side-effects, then return len.
3376 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3377 can be optimized into: i++; x = 3; */
3378 tree len = c_strlen (src, 1);
3379 if (len && TREE_CODE (len) == INTEGER_CST)
3381 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3382 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3385 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3387 /* If SRC is not a pointer type, don't do this operation inline. */
3388 if (align == 0)
3389 return NULL_RTX;
3391 /* Bail out if we can't compute strlen in the right mode. */
3392 machine_mode insn_mode;
3393 enum insn_code icode = CODE_FOR_nothing;
3394 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3396 icode = optab_handler (strlen_optab, insn_mode);
3397 if (icode != CODE_FOR_nothing)
3398 break;
3400 if (insn_mode == VOIDmode)
3401 return NULL_RTX;
3403 /* Make a place to hold the source address. We will not expand
3404 the actual source until we are sure that the expansion will
3405 not fail -- there are trees that cannot be expanded twice. */
3406 rtx src_reg = gen_reg_rtx (Pmode);
3408 /* Mark the beginning of the strlen sequence so we can emit the
3409 source operand later. */
3410 rtx_insn *before_strlen = get_last_insn ();
3412 class expand_operand ops[4];
3413 create_output_operand (&ops[0], target, insn_mode);
3414 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3415 create_integer_operand (&ops[2], 0);
3416 create_integer_operand (&ops[3], align);
3417 if (!maybe_expand_insn (icode, 4, ops))
3418 return NULL_RTX;
3420 /* Check to see if the argument was declared attribute nonstring
3421 and if so, issue a warning since at this point it's not known
3422 to be nul-terminated. */
3423 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3425 /* Now that we are assured of success, expand the source. */
3426 start_sequence ();
3427 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3428 if (pat != src_reg)
3430 #ifdef POINTERS_EXTEND_UNSIGNED
3431 if (GET_MODE (pat) != Pmode)
3432 pat = convert_to_mode (Pmode, pat,
3433 POINTERS_EXTEND_UNSIGNED);
3434 #endif
3435 emit_move_insn (src_reg, pat);
3437 pat = get_insns ();
3438 end_sequence ();
3440 if (before_strlen)
3441 emit_insn_after (pat, before_strlen);
3442 else
3443 emit_insn_before (pat, get_insns ());
3445 /* Return the value in the proper mode for this function. */
3446 if (GET_MODE (ops[0].value) == target_mode)
3447 target = ops[0].value;
3448 else if (target != 0)
3449 convert_move (target, ops[0].value, 0);
3450 else
3451 target = convert_to_mode (target_mode, ops[0].value, 0);
3453 return target;
3456 /* Expand call EXP to the strnlen built-in, returning the result
3457 and setting it in TARGET if convenient; return NULL_RTX on failure. */
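/* E.g., assuming straightforward callers such as

     size_t n1 = strnlen ("hello", 3);
     size_t n2 = strnlen ("hello", 10);

   a constant bound combined with a source length that c_strlen can
   compute lets this expand to the constants 3 and 5 respectively; the
   SSA_NAME path below relies only on the bound's recorded value range
   (a sketch of the common cases).  */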
3459 static rtx
3460 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3462 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3463 return NULL_RTX;
3465 tree src = CALL_EXPR_ARG (exp, 0);
3466 tree bound = CALL_EXPR_ARG (exp, 1);
3468 if (!bound)
3469 return NULL_RTX;
3471 location_t loc = UNKNOWN_LOCATION;
3472 if (EXPR_HAS_LOCATION (exp))
3473 loc = EXPR_LOCATION (exp);
3475 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3476 so these conversions aren't necessary. */
3477 c_strlen_data lendata = { };
3478 tree len = c_strlen (src, 0, &lendata, 1);
3479 if (len)
3480 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3482 if (TREE_CODE (bound) == INTEGER_CST)
3484 if (!len)
3485 return NULL_RTX;
3487 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3488 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3491 if (TREE_CODE (bound) != SSA_NAME)
3492 return NULL_RTX;
3494 wide_int min, max;
3495 value_range r;
3496 get_global_range_query ()->range_of_expr (r, bound);
3497 if (r.kind () != VR_RANGE)
3498 return NULL_RTX;
3499 min = r.lower_bound ();
3500 max = r.upper_bound ();
3502 if (!len || TREE_CODE (len) != INTEGER_CST)
3504 bool exact;
3505 lendata.decl = unterminated_array (src, &len, &exact);
3506 if (!lendata.decl)
3507 return NULL_RTX;
3510 if (lendata.decl)
3511 return NULL_RTX;
3513 if (wi::gtu_p (min, wi::to_wide (len)))
3514 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3516 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3517 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3520 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3521 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3522 a target constant. */
3524 static rtx
3525 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3526 fixed_size_mode mode)
3528 /* The REPresentation pointed to by DATA need not be a nul-terminated
3529 string but the caller guarantees it's large enough for MODE. */
3530 const char *rep = (const char *) data;
3532 /* The by-pieces infrastructure does not try to pick a vector mode
3533 for memcpy expansion. */
3534 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3535 /*nul_terminated=*/false);
3538 /* LEN specifies the length of the block for the memcpy/memset operation.
3539 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3540 In some cases we can make a very likely guess at the maximum size; put
3541 that guess into PROBABLE_MAX_SIZE. */
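/* A sketch of the intent: for a caller like

     void f (char *d, const char *s, unsigned short n) { memcpy (d, s, n); }

   with no range information beyond the type, MIN_SIZE becomes 0 and
   MAX_SIZE/PROBABLE_MAX_SIZE become 65535; if the recorded range of N
   is, say, [16, 128], the bounds tighten to that interval instead.  */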
3543 static void
3544 determine_block_size (tree len, rtx len_rtx,
3545 unsigned HOST_WIDE_INT *min_size,
3546 unsigned HOST_WIDE_INT *max_size,
3547 unsigned HOST_WIDE_INT *probable_max_size)
3549 if (CONST_INT_P (len_rtx))
3551 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3552 return;
3554 else
3556 wide_int min, max;
3557 enum value_range_kind range_type = VR_UNDEFINED;
3559 /* Determine bounds from the type. */
3560 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3561 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3562 else
3563 *min_size = 0;
3564 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3565 *probable_max_size = *max_size
3566 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3567 else
3568 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3570 if (TREE_CODE (len) == SSA_NAME)
3572 value_range r;
3573 get_global_range_query ()->range_of_expr (r, len);
3574 range_type = r.kind ();
3575 if (range_type != VR_UNDEFINED)
3577 min = wi::to_wide (r.min ());
3578 max = wi::to_wide (r.max ());
3581 if (range_type == VR_RANGE)
3583 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3584 *min_size = min.to_uhwi ();
3585 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3586 *probable_max_size = *max_size = max.to_uhwi ();
3588 else if (range_type == VR_ANTI_RANGE)
3590 /* Code like
3592 int n;
3593 if (n < 100)
3594 memcpy (a, b, n)
3596 produces an anti-range allowing negative values of N. We can still
3597 use that information and guess that N is not negative. */
3599 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3600 *probable_max_size = min.to_uhwi () - 1;
3603 gcc_checking_assert (*max_size <=
3604 (unsigned HOST_WIDE_INT)
3605 GET_MODE_MASK (GET_MODE (len_rtx)));
3608 /* Expand a call EXP to the memcpy builtin.
3609 Return NULL_RTX if we failed; the caller should emit a normal call,
3610 otherwise try to get the result in TARGET, if convenient (and in
3611 mode MODE if that's convenient). */
3613 static rtx
3614 expand_builtin_memcpy (tree exp, rtx target)
3616 if (!validate_arglist (exp,
3617 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3618 return NULL_RTX;
3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3624 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3625 /*retmode=*/ RETURN_BEGIN, false);
3628 /* Check a call EXP to the memmove built-in for validity.
3629 Return NULL_RTX on both success and failure. */
3631 static rtx
3632 expand_builtin_memmove (tree exp, rtx target)
3634 if (!validate_arglist (exp,
3635 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3636 return NULL_RTX;
3638 tree dest = CALL_EXPR_ARG (exp, 0);
3639 tree src = CALL_EXPR_ARG (exp, 1);
3640 tree len = CALL_EXPR_ARG (exp, 2);
3642 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3643 /*retmode=*/ RETURN_BEGIN, true);
3646 /* Expand a call EXP to the mempcpy builtin.
3647 Return NULL_RTX if we failed; the caller should emit a normal call,
3648 otherwise try to get the result in TARGET, if convenient (and in
3649 mode MODE if that's convenient). */
3651 static rtx
3652 expand_builtin_mempcpy (tree exp, rtx target)
3654 if (!validate_arglist (exp,
3655 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3656 return NULL_RTX;
3658 tree dest = CALL_EXPR_ARG (exp, 0);
3659 tree src = CALL_EXPR_ARG (exp, 1);
3660 tree len = CALL_EXPR_ARG (exp, 2);
3662 /* Policy does not generally allow using compute_objsize (which
3663 is used internally by check_memop_size) to change code generation
3664 or drive optimization decisions.
3666 In this instance it is safe because the code we generate has
3667 the same semantics regardless of the return value of
3668 check_memop_sizes. Exactly the same amount of data is copied
3669 and the return value is exactly the same in both cases.
3671 Furthermore, check_memop_size always uses mode 0 for the call to
3672 compute_objsize, so the imprecise nature of compute_objsize is
3673 avoided. */
3675 /* Avoid expanding mempcpy into memcpy when the call is determined
3676 to overflow the buffer. This also prevents the same overflow
3677 from being diagnosed again when expanding memcpy. */
3679 return expand_builtin_mempcpy_args (dest, src, len,
3680 target, exp, /*retmode=*/ RETURN_END);
3683 /* Helper function to do the actual work for expand of memory copy family
3684 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3685 of memory from SRC to DEST and assign to TARGET if convenient. Return
3686 value is based on RETMODE argument. */
3688 static rtx
3689 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3690 rtx target, tree exp, memop_ret retmode,
3691 bool might_overlap)
3693 unsigned int src_align = get_pointer_alignment (src);
3694 unsigned int dest_align = get_pointer_alignment (dest);
3695 rtx dest_mem, src_mem, dest_addr, len_rtx;
3696 HOST_WIDE_INT expected_size = -1;
3697 unsigned int expected_align = 0;
3698 unsigned HOST_WIDE_INT min_size;
3699 unsigned HOST_WIDE_INT max_size;
3700 unsigned HOST_WIDE_INT probable_max_size;
3702 bool is_move_done;
3704 /* If DEST is not a pointer type, call the normal function. */
3705 if (dest_align == 0)
3706 return NULL_RTX;
3708 /* If SRC is not a pointer type, don't do this
3709 operation in-line. */
3710 if (src_align == 0)
3711 return NULL_RTX;
3713 if (currently_expanding_gimple_stmt)
3714 stringop_block_profile (currently_expanding_gimple_stmt,
3715 &expected_align, &expected_size);
3717 if (expected_align < dest_align)
3718 expected_align = dest_align;
3719 dest_mem = get_memory_rtx (dest, len);
3720 set_mem_align (dest_mem, dest_align);
3721 len_rtx = expand_normal (len);
3722 determine_block_size (len, len_rtx, &min_size, &max_size,
3723 &probable_max_size);
3725 /* Try to get the byte representation of the constant SRC points to,
3726 with its byte size in NBYTES. */
3727 unsigned HOST_WIDE_INT nbytes;
3728 const char *rep = getbyterep (src, &nbytes);
3730 /* If the function's constant bound LEN_RTX is less than or equal
3731 to the byte size of the representation of the constant argument,
3732 and if block move would be done by pieces, we can avoid loading
3733 the bytes from memory and only store the computed constant.
3734 This works in the overlap (memmove) case as well because
3735 store_by_pieces just generates a series of stores of constants
3736 from the representation returned by getbyterep(). */
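/* E.g., for a call such as

     char buf[8];
     memcpy (buf, "hi", 3);

   REP is the three bytes "hi" plus the trailing nul and LEN_RTX is 3,
   so when store_by_pieces is usable for the destination alignment the
   bytes are emitted as immediates instead of being loaded from the
   string constant (a sketch of the intent).  */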
3737 if (rep
3738 && CONST_INT_P (len_rtx)
3739 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3740 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3741 CONST_CAST (char *, rep),
3742 dest_align, false))
3744 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3745 builtin_memcpy_read_str,
3746 CONST_CAST (char *, rep),
3747 dest_align, false, retmode);
3748 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3749 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3750 return dest_mem;
3753 src_mem = get_memory_rtx (src, len);
3754 set_mem_align (src_mem, src_align);
3756 /* Copy word part most expediently. */
3757 enum block_op_methods method = BLOCK_OP_NORMAL;
3758 if (CALL_EXPR_TAILCALL (exp)
3759 && (retmode == RETURN_BEGIN || target == const0_rtx))
3760 method = BLOCK_OP_TAILCALL;
3761 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3762 && retmode == RETURN_END
3763 && !might_overlap
3764 && target != const0_rtx);
3765 if (use_mempcpy_call)
3766 method = BLOCK_OP_NO_LIBCALL_RET;
3767 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3768 expected_align, expected_size,
3769 min_size, max_size, probable_max_size,
3770 use_mempcpy_call, &is_move_done,
3771 might_overlap);
3773 /* Bail out when a mempcpy call would be expanded as libcall and when
3774 we have a target that provides a fast implementation
3775 of mempcpy routine. */
3776 if (!is_move_done)
3777 return NULL_RTX;
3779 if (dest_addr == pc_rtx)
3780 return NULL_RTX;
3782 if (dest_addr == 0)
3784 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3785 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3788 if (retmode != RETURN_BEGIN && target != const0_rtx)
3790 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3791 /* stpcpy returns a pointer to the last byte (the NUL terminator). */
3792 if (retmode == RETURN_END_MINUS_ONE)
3793 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3796 return dest_addr;
3799 static rtx
3800 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3801 rtx target, tree orig_exp, memop_ret retmode)
3803 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3804 retmode, false);
3807 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3808 we failed, the caller should emit a normal call, otherwise try to
3809 get the result in TARGET, if convenient.
3810 Return value is based on RETMODE argument. */
3812 static rtx
3813 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3815 class expand_operand ops[3];
3816 rtx dest_mem;
3817 rtx src_mem;
3819 if (!targetm.have_movstr ())
3820 return NULL_RTX;
3822 dest_mem = get_memory_rtx (dest, NULL);
3823 src_mem = get_memory_rtx (src, NULL);
3824 if (retmode == RETURN_BEGIN)
3826 target = force_reg (Pmode, XEXP (dest_mem, 0));
3827 dest_mem = replace_equiv_address (dest_mem, target);
3830 create_output_operand (&ops[0],
3831 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3832 create_fixed_operand (&ops[1], dest_mem);
3833 create_fixed_operand (&ops[2], src_mem);
3834 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3835 return NULL_RTX;
3837 if (retmode != RETURN_BEGIN && target != const0_rtx)
3839 target = ops[0].value;
3840 /* movstr is supposed to set end to the address of the NUL
3841 terminator. If the caller requested a mempcpy-like return value,
3842 adjust it. */
3843 if (retmode == RETURN_END)
3845 rtx tem = plus_constant (GET_MODE (target),
3846 gen_lowpart (GET_MODE (target), target), 1);
3847 emit_move_insn (target, force_operand (tem, NULL_RTX));
3850 return target;
3853 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3854 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3855 try to get the result in TARGET, if convenient (and in mode MODE if that's
3856 convenient). */
3858 static rtx
3859 expand_builtin_strcpy (tree exp, rtx target)
3861 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3862 return NULL_RTX;
3864 tree dest = CALL_EXPR_ARG (exp, 0);
3865 tree src = CALL_EXPR_ARG (exp, 1);
3867 return expand_builtin_strcpy_args (exp, dest, src, target);
3870 /* Helper function to do the actual work for expand_builtin_strcpy. The
3871 arguments to the builtin_strcpy call DEST and SRC are broken out
3872 so that this can also be called without constructing an actual CALL_EXPR.
3873 The other arguments and return value are the same as for
3874 expand_builtin_strcpy. */
3876 static rtx
3877 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3879 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3882 /* Expand a call EXP to the stpcpy builtin.
3883 Return NULL_RTX if we failed; the caller should emit a normal call,
3884 otherwise try to get the result in TARGET, if convenient (and in
3885 mode MODE if that's convenient). */
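/* Two illustrative cases (a sketch, not exhaustive):

     (void) stpcpy (d, s);           -> expands like strcpy (d, s)
     char *e = stpcpy (d, "abc");    -> copies 4 bytes and sets e = d + 3

   The first relies on the result being ignored (TARGET == const0_rtx);
   the second on the source length being known at compile time.  */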
3887 static rtx
3888 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3890 tree dst, src;
3891 location_t loc = EXPR_LOCATION (exp);
3893 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3894 return NULL_RTX;
3896 dst = CALL_EXPR_ARG (exp, 0);
3897 src = CALL_EXPR_ARG (exp, 1);
3899 /* If return value is ignored, transform stpcpy into strcpy. */
3900 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3902 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3903 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3904 return expand_expr (result, target, mode, EXPAND_NORMAL);
3906 else
3908 tree len, lenp1;
3909 rtx ret;
3911 /* Ensure we get an actual string whose length can be evaluated at
3912 compile-time, not an expression containing a string. This is
3913 because the latter will potentially produce pessimized code
3914 when used to produce the return value. */
3915 c_strlen_data lendata = { };
3916 if (!c_getstr (src)
3917 || !(len = c_strlen (src, 0, &lendata, 1)))
3918 return expand_movstr (dst, src, target,
3919 /*retmode=*/ RETURN_END_MINUS_ONE);
3921 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3922 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3923 target, exp,
3924 /*retmode=*/ RETURN_END_MINUS_ONE);
3926 if (ret)
3927 return ret;
3929 if (TREE_CODE (len) == INTEGER_CST)
3931 rtx len_rtx = expand_normal (len);
3933 if (CONST_INT_P (len_rtx))
3935 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3937 if (ret)
3939 if (! target)
3941 if (mode != VOIDmode)
3942 target = gen_reg_rtx (mode);
3943 else
3944 target = gen_reg_rtx (GET_MODE (ret));
3946 if (GET_MODE (target) != GET_MODE (ret))
3947 ret = gen_lowpart (GET_MODE (target), ret);
3949 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3950 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3951 gcc_assert (ret);
3953 return target;
3958 return expand_movstr (dst, src, target,
3959 /*retmode=*/ RETURN_END_MINUS_ONE);
3963 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3964 arguments while being careful to avoid duplicate warnings (which could
3965 be issued if the expander were to expand the call, resulting in it
3966 being emitted in expand_call()). */
3968 static rtx
3969 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3971 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3973 /* The call has been successfully expanded. Check for nonstring
3974 arguments and issue warnings as appropriate. */
3975 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3976 return ret;
3979 return NULL_RTX;
3982 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3983 bytes from constant string DATA + OFFSET and return it as target
3984 constant. */
3986 static rtx
3987 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3988 fixed_size_mode mode)
3990 const char *str = (const char *) data;
3992 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3993 return const0_rtx;
3995 /* The by-pieces infrastructure does not try to pick a vector mode
3996 for strncpy expansion. */
3997 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
4000 /* Helper to check the sizes of sequences and the destination of calls
4001 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4002 success (no overflow or invalid sizes), false otherwise. */
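/* For instance, a bound equal to the destination size, as in

     char d[8];
     strncat (d, s, sizeof d);

   is diagnosed below with -Wstringop-overflow ("specified bound ...
   equals destination size"), since strncat always appends the
   terminating nul beyond what the bound allows (illustrative case).  */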
4004 static bool
4005 check_strncat_sizes (tree exp, tree objsize)
4007 tree dest = CALL_EXPR_ARG (exp, 0);
4008 tree src = CALL_EXPR_ARG (exp, 1);
4009 tree maxread = CALL_EXPR_ARG (exp, 2);
4011 /* Try to determine the range of lengths that the source expression
4012 refers to. */
4013 c_strlen_data lendata = { };
4014 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4016 /* Try to verify that the destination is big enough for the shortest
4017 string. */
4019 access_data data (nullptr, exp, access_read_write, maxread, true);
4020 if (!objsize && warn_stringop_overflow)
4022 /* If it hasn't been provided by __strncat_chk, try to determine
4023 the size of the destination object into which the source is
4024 being copied. */
4025 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4028 /* Add one for the terminating nul. */
4029 tree srclen = (lendata.minlen
4030 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4031 size_one_node)
4032 : NULL_TREE);
4034 /* The strncat function copies at most MAXREAD bytes and always appends
4035 the terminating nul so the specified upper bound should never be equal
4036 to (or greater than) the size of the destination. */
4037 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4038 && tree_int_cst_equal (objsize, maxread))
4040 location_t loc = EXPR_LOCATION (exp);
4041 warning_at (loc, OPT_Wstringop_overflow_,
4042 "%qD specified bound %E equals destination size",
4043 get_callee_fndecl (exp), maxread);
4045 return false;
4048 if (!srclen
4049 || (maxread && tree_fits_uhwi_p (maxread)
4050 && tree_fits_uhwi_p (srclen)
4051 && tree_int_cst_lt (maxread, srclen)))
4052 srclen = maxread;
4054 /* The number of bytes to write is LEN but check_access will also
4055 check SRCLEN if LEN's value isn't known. */
4056 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4057 objsize, data.mode, &data);
4060 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4061 NULL_RTX if we failed; the caller should emit a normal call. */
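/* E.g., with a constant source and length, as in

     char buf[8];
     strncpy (buf, "ab", sizeof buf);

   strlen ("ab") + 1 < 8, so the zero padding required by strncpy is
   produced by store_by_pieces writing all eight bytes directly
   (a sketch of the single case handled inline below).  */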
4063 static rtx
4064 expand_builtin_strncpy (tree exp, rtx target)
4066 location_t loc = EXPR_LOCATION (exp);
4068 if (!validate_arglist (exp,
4069 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4070 return NULL_RTX;
4071 tree dest = CALL_EXPR_ARG (exp, 0);
4072 tree src = CALL_EXPR_ARG (exp, 1);
4073 /* The number of bytes to write (not the maximum). */
4074 tree len = CALL_EXPR_ARG (exp, 2);
4076 /* The length of the source sequence. */
4077 tree slen = c_strlen (src, 1);
4079 /* We must be passed a constant len and src parameter. */
4080 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4081 return NULL_RTX;
4083 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4085 /* We're required to pad with trailing zeros if the requested
4086 len is greater than strlen(s2)+1. In that case try to
4087 use store_by_pieces, if it fails, punt. */
4088 if (tree_int_cst_lt (slen, len))
4090 unsigned int dest_align = get_pointer_alignment (dest);
4091 const char *p = c_getstr (src);
4092 rtx dest_mem;
4094 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4095 || !can_store_by_pieces (tree_to_uhwi (len),
4096 builtin_strncpy_read_str,
4097 CONST_CAST (char *, p),
4098 dest_align, false))
4099 return NULL_RTX;
4101 dest_mem = get_memory_rtx (dest, len);
4102 store_by_pieces (dest_mem, tree_to_uhwi (len),
4103 builtin_strncpy_read_str,
4104 CONST_CAST (char *, p), dest_align, false,
4105 RETURN_BEGIN);
4106 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4107 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4108 return dest_mem;
4111 return NULL_RTX;
4114 /* Return the RTL of a register in MODE generated from PREV in the
4115 previous iteration. */
4117 static rtx
4118 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4120 rtx target = nullptr;
4121 if (prev != nullptr && prev->data != nullptr)
4123 /* Use the previous data in the same mode. */
4124 if (prev->mode == mode)
4125 return prev->data;
4127 fixed_size_mode prev_mode = prev->mode;
4129 /* Don't use the previous data to write QImode if it is in a
4130 vector mode. */
4131 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4132 return target;
4134 rtx prev_rtx = prev->data;
4136 if (REG_P (prev_rtx)
4137 && HARD_REGISTER_P (prev_rtx)
4138 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4140 /* This case occurs when PREV_MODE is a vector and when
4141 MODE is too small to store using vector operations.
4142 After register allocation, the code will need to move the
4143 lowpart of the vector register into a non-vector register.
4145 Also, the target has chosen to use a hard register
4146 instead of going with the default choice of using a
4147 pseudo register. We should respect that choice and try to
4148 avoid creating a pseudo register with the same mode as the
4149 current hard register.
4151 In principle, we could just use a lowpart MODE subreg of
4152 the vector register. However, the vector register mode might
4153 be too wide for non-vector registers, and we already know
4154 that the non-vector mode is too small for vector registers.
4155 It's therefore likely that we'd need to spill to memory in
4156 the vector mode and reload the non-vector value from there.
4158 Try to avoid that by reducing the vector register to the
4159 smallest size that it can hold. This should increase the
4160 chances that non-vector registers can hold both the inner
4161 and outer modes of the subreg that we generate later. */
4162 machine_mode m;
4163 fixed_size_mode candidate;
4164 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4165 if (is_a<fixed_size_mode> (m, &candidate))
4167 if (GET_MODE_SIZE (candidate)
4168 >= GET_MODE_SIZE (prev_mode))
4169 break;
4170 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4171 && lowpart_subreg_regno (REGNO (prev_rtx),
4172 prev_mode, candidate) >= 0)
4174 target = lowpart_subreg (candidate, prev_rtx,
4175 prev_mode);
4176 prev_rtx = target;
4177 prev_mode = candidate;
4178 break;
4181 if (target == nullptr)
4182 prev_rtx = copy_to_reg (prev_rtx);
4185 target = lowpart_subreg (mode, prev_rtx, prev_mode);
4187 return target;
4190 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4191 bytes from constant string DATA + OFFSET and return it as target
4192 constant. If PREV isn't nullptr, it has the RTL info from the
4193 previous iteration. */
4195 static rtx
4196 builtin_memset_read_str (void *data, void *prev,
4197 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4198 fixed_size_mode mode)
4200 const char *c = (const char *) data;
4201 unsigned int size = GET_MODE_SIZE (mode);
4203 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4204 mode);
4205 if (target != nullptr)
4206 return target;
4207 rtx src = gen_int_mode (*c, QImode);
4209 if (VECTOR_MODE_P (mode))
4211 gcc_assert (GET_MODE_INNER (mode) == QImode);
4213 rtx const_vec = gen_const_vec_duplicate (mode, src);
4214 if (prev == NULL)
4215 /* Return CONST_VECTOR when called by a query function. */
4216 return const_vec;
4218 /* Use the move expander with CONST_VECTOR. */
4219 target = gen_reg_rtx (mode);
4220 emit_move_insn (target, const_vec);
4221 return target;
4224 char *p = XALLOCAVEC (char, size);
4226 memset (p, *c, size);
4228 /* Vector modes should be handled above. */
4229 return c_readstr (p, as_a <scalar_int_mode> (mode));
4232 /* Callback routine for store_by_pieces. Return the RTL of a register
4233 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4234 char value given in the RTL register data. For example, if mode is
4235 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4236 nullptr, it has the RTL info from the previous iteration. */
4238 static rtx
4239 builtin_memset_gen_str (void *data, void *prev,
4240 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4241 fixed_size_mode mode)
4243 rtx target, coeff;
4244 size_t size;
4245 char *p;
4247 size = GET_MODE_SIZE (mode);
4248 if (size == 1)
4249 return (rtx) data;
4251 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4252 if (target != nullptr)
4253 return target;
4255 if (VECTOR_MODE_P (mode))
4257 gcc_assert (GET_MODE_INNER (mode) == QImode);
4259 /* vec_duplicate_optab is a precondition to pick a vector mode for
4260 the memset expander. */
4261 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4263 target = gen_reg_rtx (mode);
4264 class expand_operand ops[2];
4265 create_output_operand (&ops[0], target, mode);
4266 create_input_operand (&ops[1], (rtx) data, QImode);
4267 expand_insn (icode, 2, ops);
4268 if (!rtx_equal_p (target, ops[0].value))
4269 emit_move_insn (target, ops[0].value);
4271 return target;
4274 p = XALLOCAVEC (char, size);
4275 memset (p, 1, size);
4276 /* Vector modes should be handled above. */
4277 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
4279 target = convert_to_mode (mode, (rtx) data, 1);
4280 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4281 return force_reg (mode, target);
4284 /* Expand expression EXP, which is a call to the memset builtin. Return
4285 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4286 try to get the result in TARGET, if convenient (and in mode MODE if that's
4287 convenient). */
4289 static rtx
4290 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4292 if (!validate_arglist (exp,
4293 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4294 return NULL_RTX;
4296 tree dest = CALL_EXPR_ARG (exp, 0);
4297 tree val = CALL_EXPR_ARG (exp, 1);
4298 tree len = CALL_EXPR_ARG (exp, 2);
4300 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4303 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4304 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4305 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4306 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4308 The strategy is to issue one store_by_pieces for each power of two,
4309 from most to least significant, guarded by a test on whether there
4310 are at least that many bytes left to copy in LEN.
4312 ??? Should we skip some powers of two in favor of loops? Maybe start
4313 at the max of TO/LEN/word alignment, at least when optimizing for
4314 size, instead of ensuring O(log len) dynamic compares? */
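/* A rough sketch of the emitted shape, assuming VALC = 0, a length
   known to be a multiple of 4 (CTZ_LEN = 2), MIN_LEN = 0 and
   MAX_LEN = 64:

     if (rem >= 64) { store 64 bytes; ptr += 64; rem -= 64; }
     if (rem >= 32) { store 32 bytes; ptr += 32; rem -= 32; }
     if (rem >= 16) { store 16 bytes; ptr += 16; rem -= 16; }
     if (rem >= 8)  { store 8 bytes;  ptr += 8;  rem -= 8;  }
     if (rem >= 4)  { store 4 bytes; }

   i.e. one guarded store_by_pieces per power of two from MAX_LEN down
   to 1 << CTZ_LEN; stores for bits in the prefix shared by MIN_LEN and
   MAX_LEN are emitted unconditionally (or skipped) instead.  */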
4316 bool
4317 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4318 unsigned HOST_WIDE_INT min_len,
4319 unsigned HOST_WIDE_INT max_len,
4320 rtx val, char valc, unsigned int align)
4322 int max_bits = floor_log2 (max_len);
4323 int min_bits = floor_log2 (min_len);
4324 int sctz_len = ctz_len;
4326 gcc_checking_assert (sctz_len >= 0);
4328 if (val)
4329 valc = 1;
4331 /* Bits more significant than TST_BITS are part of the shared prefix
4332 in the binary representation of both min_len and max_len. Since
4333 they're identical, we don't need to test them in the loop. */
4334 int tst_bits = (max_bits != min_bits ? max_bits
4335 : floor_log2 (max_len ^ min_len));
4337 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4338 bytes, to lower max_bits. In the unlikely case of a constant LEN
4339 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4340 single store_by_pieces, but otherwise, select the minimum multiple
4341 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
4342 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4343 unsigned HOST_WIDE_INT blksize;
4344 if (max_len > min_len)
4346 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4347 align / BITS_PER_UNIT);
4348 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4349 blksize &= ~(alrng - 1);
4351 else if (max_len == min_len)
4352 blksize = max_len;
4353 else
4354 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4355 return false;
4356 if (min_len >= blksize)
4358 min_len -= blksize;
4359 min_bits = floor_log2 (min_len);
4360 max_len -= blksize;
4361 max_bits = floor_log2 (max_len);
4363 tst_bits = (max_bits != min_bits ? max_bits
4364 : floor_log2 (max_len ^ min_len));
4366 else
4367 blksize = 0;
4369 /* Check that we can use store by pieces for the maximum store count
4370 we may issue (initial fixed-size block, plus conditional
4371 power-of-two-sized stores from max_bits to ctz_len). */
4372 unsigned HOST_WIDE_INT xlenest = blksize;
4373 if (max_bits >= 0)
4374 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4375 - (HOST_WIDE_INT_1U << ctz_len));
4376 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
4377 &valc, align, true))
4378 return false;
4380 by_pieces_constfn constfun;
4381 void *constfundata;
4382 if (val)
4384 constfun = builtin_memset_gen_str;
4385 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4386 val);
4388 else
4390 constfun = builtin_memset_read_str;
4391 constfundata = &valc;
4394 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4395 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4396 to = replace_equiv_address (to, ptr);
4397 set_mem_align (to, align);
4399 if (blksize)
4401 to = store_by_pieces (to, blksize,
4402 constfun, constfundata,
4403 align, true,
4404 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4405 if (max_len == 0)
4406 return true;
4408 /* Adjust PTR, TO and REM. Since TO's address is likely
4409 PTR+offset, we have to replace it. */
4410 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4411 to = replace_equiv_address (to, ptr);
4412 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4413 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4416 /* Iterate over power-of-two block sizes from the maximum length to
4417 the least significant bit possibly set in the length. */
4418 for (int i = max_bits; i >= sctz_len; i--)
4420 rtx_code_label *label = NULL;
4421 blksize = HOST_WIDE_INT_1U << i;
4423 /* If we're past the bits shared between min_ and max_len, expand
4424 a test on the dynamic length, comparing it with the
4425 BLKSIZE. */
4426 if (i <= tst_bits)
4428 label = gen_label_rtx ();
4429 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4430 ptr_mode, 1, label,
4431 profile_probability::even ());
4433 /* If we are at a bit that is in the prefix shared by min_ and
4434 max_len, skip this BLKSIZE if the bit is clear. */
4435 else if ((max_len & blksize) == 0)
4436 continue;
4438 /* Issue a store of BLKSIZE bytes. */
4439 to = store_by_pieces (to, blksize,
4440 constfun, constfundata,
4441 align, true,
4442 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4444 /* Adjust REM and PTR, unless this is the last iteration. */
4445 if (i != sctz_len)
4447 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4448 to = replace_equiv_address (to, ptr);
4449 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4450 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4453 if (label)
4455 emit_label (label);
4457 /* Given conditional stores, the offset can no longer be
4458 known, so clear it. */
4459 clear_mem_offset (to);
4463 return true;
4466 /* Helper function to do the actual work for expand_builtin_memset. The
4467 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4468 so that this can also be called without constructing an actual CALL_EXPR.
4469 The other arguments and return value are the same as for
4470 expand_builtin_memset. */
4472 static rtx
4473 expand_builtin_memset_args (tree dest, tree val, tree len,
4474 rtx target, machine_mode mode, tree orig_exp)
4476 tree fndecl, fn;
4477 enum built_in_function fcode;
4478 machine_mode val_mode;
4479 char c;
4480 unsigned int dest_align;
4481 rtx dest_mem, dest_addr, len_rtx;
4482 HOST_WIDE_INT expected_size = -1;
4483 unsigned int expected_align = 0;
4484 unsigned HOST_WIDE_INT min_size;
4485 unsigned HOST_WIDE_INT max_size;
4486 unsigned HOST_WIDE_INT probable_max_size;
4488 dest_align = get_pointer_alignment (dest);
4490 /* If DEST is not a pointer type, don't do this operation in-line. */
4491 if (dest_align == 0)
4492 return NULL_RTX;
4494 if (currently_expanding_gimple_stmt)
4495 stringop_block_profile (currently_expanding_gimple_stmt,
4496 &expected_align, &expected_size);
4498 if (expected_align < dest_align)
4499 expected_align = dest_align;
4501 /* If the LEN parameter is zero, return DEST. */
4502 if (integer_zerop (len))
4504 /* Evaluate and ignore VAL in case it has side-effects. */
4505 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4506 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4509 /* Stabilize the arguments in case we fail. */
4510 dest = builtin_save_expr (dest);
4511 val = builtin_save_expr (val);
4512 len = builtin_save_expr (len);
4514 len_rtx = expand_normal (len);
4515 determine_block_size (len, len_rtx, &min_size, &max_size,
4516 &probable_max_size);
4517 dest_mem = get_memory_rtx (dest, len);
4518 val_mode = TYPE_MODE (unsigned_char_type_node);
4520 if (TREE_CODE (val) != INTEGER_CST
4521 || target_char_cast (val, &c))
4523 rtx val_rtx;
4525 val_rtx = expand_normal (val);
4526 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4528 /* Assume that we can memset by pieces if we can store
4529 * the coefficients by pieces (in the required modes).
4530 * We can't pass builtin_memset_gen_str as that emits RTL. */
4531 c = 1;
4532 if (tree_fits_uhwi_p (len)
4533 && can_store_by_pieces (tree_to_uhwi (len),
4534 builtin_memset_read_str, &c, dest_align,
4535 true))
4537 val_rtx = force_reg (val_mode, val_rtx);
4538 store_by_pieces (dest_mem, tree_to_uhwi (len),
4539 builtin_memset_gen_str, val_rtx, dest_align,
4540 true, RETURN_BEGIN);
4542 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4543 dest_align, expected_align,
4544 expected_size, min_size, max_size,
4545 probable_max_size)
4546 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4547 tree_ctz (len),
4548 min_size, max_size,
4549 val_rtx, 0,
4550 dest_align))
4551 goto do_libcall;
4553 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4554 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4555 return dest_mem;
4558 if (c)
4560 if (tree_fits_uhwi_p (len)
4561 && can_store_by_pieces (tree_to_uhwi (len),
4562 builtin_memset_read_str, &c, dest_align,
4563 true))
4564 store_by_pieces (dest_mem, tree_to_uhwi (len),
4565 builtin_memset_read_str, &c, dest_align, true,
4566 RETURN_BEGIN);
4567 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4568 gen_int_mode (c, val_mode),
4569 dest_align, expected_align,
4570 expected_size, min_size, max_size,
4571 probable_max_size)
4572 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4573 tree_ctz (len),
4574 min_size, max_size,
4575 NULL_RTX, c,
4576 dest_align))
4577 goto do_libcall;
4579 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4580 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4581 return dest_mem;
4584 set_mem_align (dest_mem, dest_align);
4585 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4586 CALL_EXPR_TAILCALL (orig_exp)
4587 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4588 expected_align, expected_size,
4589 min_size, max_size,
4590 probable_max_size, tree_ctz (len));
4592 if (dest_addr == 0)
4594 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4595 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4598 return dest_addr;
4600 do_libcall:
4601 fndecl = get_callee_fndecl (orig_exp);
4602 fcode = DECL_FUNCTION_CODE (fndecl);
4603 if (fcode == BUILT_IN_MEMSET)
4604 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4605 dest, val, len);
4606 else if (fcode == BUILT_IN_BZERO)
4607 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4608 dest, len);
4609 else
4610 gcc_unreachable ();
4611 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4612 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4613 return expand_call (fn, target, target == const0_rtx);
4616 /* Expand expression EXP, which is a call to the bzero builtin. Return
4617 NULL_RTX if we failed; the caller should emit a normal call. */
4619 static rtx
4620 expand_builtin_bzero (tree exp)
4622 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4623 return NULL_RTX;
4625 tree dest = CALL_EXPR_ARG (exp, 0);
4626 tree size = CALL_EXPR_ARG (exp, 1);
4628 /* New argument list transforming bzero(ptr x, int y) to
4629 memset(ptr x, int 0, size_t y). This is done this way
4630 so that if it isn't expanded inline, we fall back to
4631 calling bzero instead of memset. */
4633 location_t loc = EXPR_LOCATION (exp);
4635 return expand_builtin_memset_args (dest, integer_zero_node,
4636 fold_convert_loc (loc,
4637 size_type_node, size),
4638 const0_rtx, VOIDmode, exp);
4641 /* Try to expand cmpstr operation ICODE with the given operands.
4642 Return the result rtx on success, otherwise return null. */
4644 static rtx
4645 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4646 HOST_WIDE_INT align)
4648 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4650 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4651 target = NULL_RTX;
4653 class expand_operand ops[4];
4654 create_output_operand (&ops[0], target, insn_mode);
4655 create_fixed_operand (&ops[1], arg1_rtx);
4656 create_fixed_operand (&ops[2], arg2_rtx);
4657 create_integer_operand (&ops[3], align);
4658 if (maybe_expand_insn (icode, 4, ops))
4659 return ops[0].value;
4660 return NULL_RTX;
4663 /* Expand expression EXP, which is a call to the memcmp built-in function.
4664 Return NULL_RTX if we failed and the caller should emit a normal call,
4665 otherwise try to get the result in TARGET, if convenient.
4666 RESULT_EQ is true if we can relax the returned value to be either zero
4667 or nonzero, without caring about the sign. */
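/* For a caller that only tests the result against zero, e.g.

     if (memcmp (p, "GIF8", 4) == 0)
       ...

   earlier passes may lower the call so that RESULT_EQ is true here;
   a zero/nonzero answer is then sufficient, and a constant operand can
   be compared by pieces without loading it from memory (a sketch).  */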
4669 static rtx
4670 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4672 if (!validate_arglist (exp,
4673 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4674 return NULL_RTX;
4676 tree arg1 = CALL_EXPR_ARG (exp, 0);
4677 tree arg2 = CALL_EXPR_ARG (exp, 1);
4678 tree len = CALL_EXPR_ARG (exp, 2);
4680 /* Due to the performance benefit, always inline the calls first
4681 when result_eq is false. */
4682 rtx result = NULL_RTX;
4683 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4684 if (!result_eq && fcode != BUILT_IN_BCMP)
4686 result = inline_expand_builtin_bytecmp (exp, target);
4687 if (result)
4688 return result;
4691 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4692 location_t loc = EXPR_LOCATION (exp);
4694 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4695 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4697 /* If we don't have POINTER_TYPE, call the function. */
4698 if (arg1_align == 0 || arg2_align == 0)
4699 return NULL_RTX;
4701 rtx arg1_rtx = get_memory_rtx (arg1, len);
4702 rtx arg2_rtx = get_memory_rtx (arg2, len);
4703 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4705 /* Set MEM_SIZE as appropriate. */
4706 if (CONST_INT_P (len_rtx))
4708 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4709 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4712 by_pieces_constfn constfn = NULL;
4714 /* Try to get the byte representation of the constant ARG2 (or, only
4715 when the function's result is used for equality to zero, ARG1)
4716 points to, with its byte size in NBYTES. */
4717 unsigned HOST_WIDE_INT nbytes;
4718 const char *rep = getbyterep (arg2, &nbytes);
4719 if (result_eq && rep == NULL)
4721 /* For equality to zero the arguments are interchangeable. */
4722 rep = getbyterep (arg1, &nbytes);
4723 if (rep != NULL)
4724 std::swap (arg1_rtx, arg2_rtx);
4727 /* If the function's constant bound LEN_RTX is less than or equal
4728 to the byte size of the representation of the constant argument,
4729 and if block move would be done by pieces, we can avoid loading
4730 the bytes from memory and only store the computed constant result. */
4731 if (rep
4732 && CONST_INT_P (len_rtx)
4733 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4734 constfn = builtin_memcpy_read_str;
4736 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4737 TREE_TYPE (len), target,
4738 result_eq, constfn,
4739 CONST_CAST (char *, rep));
4741 if (result)
4743 /* Return the value in the proper mode for this function. */
4744 if (GET_MODE (result) == mode)
4745 return result;
4747 if (target != 0)
4749 convert_move (target, result, 0);
4750 return target;
4753 return convert_to_mode (mode, result, 0);
4756 return NULL_RTX;
4759 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4760 if we failed; the caller should emit a normal call, otherwise try to get
4761 the result in TARGET, if convenient. */
4763 static rtx
4764 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4766 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4767 return NULL_RTX;
4769 tree arg1 = CALL_EXPR_ARG (exp, 0);
4770 tree arg2 = CALL_EXPR_ARG (exp, 1);
4772 /* Due to the performance benefit, always inline the calls first. */
4773 rtx result = NULL_RTX;
4774 result = inline_expand_builtin_bytecmp (exp, target);
4775 if (result)
4776 return result;
4778 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4779 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4780 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4781 return NULL_RTX;
4783 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4784 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4786 /* If we don't have POINTER_TYPE, call the function. */
4787 if (arg1_align == 0 || arg2_align == 0)
4788 return NULL_RTX;
4790 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4791 arg1 = builtin_save_expr (arg1);
4792 arg2 = builtin_save_expr (arg2);
4794 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4795 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4797 /* Try to call cmpstrsi. */
4798 if (cmpstr_icode != CODE_FOR_nothing)
4799 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4800 MIN (arg1_align, arg2_align));
4802 /* Try to determine at least one length and call cmpstrnsi. */
4803 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4805 tree len;
4806 rtx arg3_rtx;
4808 tree len1 = c_strlen (arg1, 1);
4809 tree len2 = c_strlen (arg2, 1);
4811 if (len1)
4812 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4813 if (len2)
4814 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4816 /* If we don't have a constant length for the first, use the length
4817 of the second, if we know it. We don't require a constant for
4818 this case; some cost analysis could be done if both are available
4819 but neither is constant. For now, assume they're equally cheap,
4820 unless one has side effects. If both strings have constant lengths,
4821 use the smaller. */
4823 if (!len1)
4824 len = len2;
4825 else if (!len2)
4826 len = len1;
4827 else if (TREE_SIDE_EFFECTS (len1))
4828 len = len2;
4829 else if (TREE_SIDE_EFFECTS (len2))
4830 len = len1;
4831 else if (TREE_CODE (len1) != INTEGER_CST)
4832 len = len2;
4833 else if (TREE_CODE (len2) != INTEGER_CST)
4834 len = len1;
4835 else if (tree_int_cst_lt (len1, len2))
4836 len = len1;
4837 else
4838 len = len2;
4840 /* If both arguments have side effects, we cannot optimize. */
4841 if (len && !TREE_SIDE_EFFECTS (len))
4843 arg3_rtx = expand_normal (len);
4844 result = expand_cmpstrn_or_cmpmem
4845 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4846 arg3_rtx, MIN (arg1_align, arg2_align));
4850 tree fndecl = get_callee_fndecl (exp);
4851 if (result)
4853 /* Return the value in the proper mode for this function. */
4854 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4855 if (GET_MODE (result) == mode)
4856 return result;
4857 if (target == 0)
4858 return convert_to_mode (mode, result, 0);
4859 convert_move (target, result, 0);
4860 return target;
4863 /* Expand the library call ourselves using a stabilized argument
4864 list to avoid re-evaluating the function's arguments twice. */
4865 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4866 copy_warning (fn, exp);
4867 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4868 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4869 return expand_call (fn, target, target == const0_rtx);
4872 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4873 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4874 try to get the result in TARGET, if convenient. */
4876 static rtx
4877 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4878 ATTRIBUTE_UNUSED machine_mode mode)
4880 if (!validate_arglist (exp,
4881 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4882 return NULL_RTX;
4884 tree arg1 = CALL_EXPR_ARG (exp, 0);
4885 tree arg2 = CALL_EXPR_ARG (exp, 1);
4886 tree arg3 = CALL_EXPR_ARG (exp, 2);
4888 location_t loc = EXPR_LOCATION (exp);
4889 tree len1 = c_strlen (arg1, 1);
4890 tree len2 = c_strlen (arg2, 1);
4892 /* Due to the performance benefit, always inline the calls first. */
4893 rtx result = NULL_RTX;
4894 result = inline_expand_builtin_bytecmp (exp, target);
4895 if (result)
4896 return result;
4898 /* If c_strlen can determine an expression for one of the string
4899 lengths, and it doesn't have side effects, then emit cmpstrnsi
4900 using length MIN(strlen(string)+1, arg3). */
4901 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4902 if (cmpstrn_icode == CODE_FOR_nothing)
4903 return NULL_RTX;
4905 tree len;
4907 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4908 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4910 if (len1)
4911 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4912 if (len2)
4913 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4915 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4917 /* If we don't have a constant length for the first, use the length
4918 of the second, if we know it. If neither string is constant length,
4919 use the given length argument. We don't require a constant for
4920 this case; some cost analysis could be done if both are available
4921 but neither is constant. For now, assume they're equally cheap,
4922 unless one has side effects. If both strings have constant lengths,
4923 use the smaller. */
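/* For example, in strncmp (s, "abc", n) only the second length is
   known: LEN2 = strlen ("abc") + 1 = 4, so LEN = 4 and the MIN below
   hands the cmpstrn pattern MIN (4, n) as its length (illustrative).  */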
4925 if (!len1 && !len2)
4926 len = len3;
4927 else if (!len1)
4928 len = len2;
4929 else if (!len2)
4930 len = len1;
4931 else if (TREE_SIDE_EFFECTS (len1))
4932 len = len2;
4933 else if (TREE_SIDE_EFFECTS (len2))
4934 len = len1;
4935 else if (TREE_CODE (len1) != INTEGER_CST)
4936 len = len2;
4937 else if (TREE_CODE (len2) != INTEGER_CST)
4938 len = len1;
4939 else if (tree_int_cst_lt (len1, len2))
4940 len = len1;
4941 else
4942 len = len2;
4944 /* If we are not using the given length, we must incorporate it here.
4945 The actual new length parameter will be MIN(len,arg3) in this case. */
4946 if (len != len3)
4948 len = fold_convert_loc (loc, sizetype, len);
4949 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4951 rtx arg1_rtx = get_memory_rtx (arg1, len);
4952 rtx arg2_rtx = get_memory_rtx (arg2, len);
4953 rtx arg3_rtx = expand_normal (len);
4954 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4955 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4956 MIN (arg1_align, arg2_align));
4958 tree fndecl = get_callee_fndecl (exp);
4959 if (result)
4961 /* Return the value in the proper mode for this function. */
4962 mode = TYPE_MODE (TREE_TYPE (exp));
4963 if (GET_MODE (result) == mode)
4964 return result;
4965 if (target == 0)
4966 return convert_to_mode (mode, result, 0);
4967 convert_move (target, result, 0);
4968 return target;
4971 /* Expand the library call ourselves using a stabilized argument
4972 list to avoid re-evaluating the function's arguments twice. */
4973 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4974 copy_warning (call, exp);
4975 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4976 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4977 return expand_call (call, target, target == const0_rtx);
4980 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4981 if that's convenient. */
4983 rtx
4984 expand_builtin_saveregs (void)
4986 rtx val;
4987 rtx_insn *seq;
4989 /* Don't do __builtin_saveregs more than once in a function.
4990 Save the result of the first call and reuse it. */
4991 if (saveregs_value != 0)
4992 return saveregs_value;
4994 /* When this function is called, it means that registers must be
4995 saved on entry to this function. So we migrate the call to the
4996 first insn of this function. */
4998 start_sequence ();
5000 /* Do whatever the machine needs done in this case. */
5001 val = targetm.calls.expand_builtin_saveregs ();
5003 seq = get_insns ();
5004 end_sequence ();
5006 saveregs_value = val;
5008 /* Put the insns after the NOTE that starts the function. If this
5009 is inside a start_sequence, make the outer-level insn chain current, so
5010 the code is placed at the start of the function. */
5011 push_topmost_sequence ();
5012 emit_insn_after (seq, entry_of_function ());
5013 pop_topmost_sequence ();
5015 return val;
5018 /* Expand a call to __builtin_next_arg. */
5020 static rtx
5021 expand_builtin_next_arg (void)
5023 /* Checking arguments is already done in fold_builtin_next_arg,
5024 which must be called before this function. */
5025 return expand_binop (ptr_mode, add_optab,
5026 crtl->args.internal_arg_pointer,
5027 crtl->args.arg_offset_rtx,
5028 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5031 /* Make it easier for the backends by protecting the valist argument
5032 from multiple evaluations. */
5034 static tree
5035 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5037 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5039 /* The current way of determining the type of valist is completely
5040 bogus. We should have the information on the va builtin instead. */
5041 if (!vatype)
5042 vatype = targetm.fn_abi_va_list (cfun->decl);
5044 if (TREE_CODE (vatype) == ARRAY_TYPE)
5046 if (TREE_SIDE_EFFECTS (valist))
5047 valist = save_expr (valist);
5049 /* For this case, the backends will be expecting a pointer to
5050 vatype, but it's possible we've actually been given an array
5051 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5052 So fix it. */
5053 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5055 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5056 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5059 else
5061 tree pt = build_pointer_type (vatype);
5063 if (! needs_lvalue)
5065 if (! TREE_SIDE_EFFECTS (valist))
5066 return valist;
5068 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5069 TREE_SIDE_EFFECTS (valist) = 1;
5072 if (TREE_SIDE_EFFECTS (valist))
5073 valist = save_expr (valist);
5074 valist = fold_build2_loc (loc, MEM_REF,
5075 vatype, valist, build_int_cst (pt, 0));
5078 return valist;
5081 /* The "standard" definition of va_list is void*. */
5083 tree
5084 std_build_builtin_va_list (void)
5086 return ptr_type_node;
5089 /* The "standard" abi va_list is va_list_type_node. */
5091 tree
5092 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5094 return va_list_type_node;
5097 /* The "standard" type of va_list is va_list_type_node. */
5099 tree
5100 std_canonical_va_list_type (tree type)
5102 tree wtype, htype;
5104 wtype = va_list_type_node;
5105 htype = type;
5107 if (TREE_CODE (wtype) == ARRAY_TYPE)
5109 /* If va_list is an array type, the argument may have decayed
5110 to a pointer type, e.g. by being passed to another function.
5111 In that case, unwrap both types so that we can compare the
5112 underlying records. */
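/* E.g. on targets whose va_list is a one-element array of a record
   (x86-64 being a well-known case), a va_list passed to another
   function arrives as a pointer to that record; unwrapping both sides
   lets the main variants match (illustrative note).  */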
5113 if (TREE_CODE (htype) == ARRAY_TYPE
5114 || POINTER_TYPE_P (htype))
5116 wtype = TREE_TYPE (wtype);
5117 htype = TREE_TYPE (htype);
5120 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5121 return va_list_type_node;
5123 return NULL_TREE;
5126 /* The "standard" implementation of va_start: just assign `nextarg' to
5127 the variable. */
5129 void
5130 std_expand_builtin_va_start (tree valist, rtx nextarg)
5132 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5133 convert_move (va_r, nextarg, 0);
5136 /* Expand EXP, a call to __builtin_va_start. */
5138 static rtx
5139 expand_builtin_va_start (tree exp)
5141 rtx nextarg;
5142 tree valist;
5143 location_t loc = EXPR_LOCATION (exp);
5145 if (call_expr_nargs (exp) < 2)
5147 error_at (loc, "too few arguments to function %<va_start%>");
5148 return const0_rtx;
5151 if (fold_builtin_next_arg (exp, true))
5152 return const0_rtx;
5154 nextarg = expand_builtin_next_arg ();
5155 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5157 if (targetm.expand_builtin_va_start)
5158 targetm.expand_builtin_va_start (valist, nextarg);
5159 else
5160 std_expand_builtin_va_start (valist, nextarg);
5162 return const0_rtx;
5165 /* Expand EXP, a call to __builtin_va_end. */
5167 static rtx
5168 expand_builtin_va_end (tree exp)
5170 tree valist = CALL_EXPR_ARG (exp, 0);
5172 /* Evaluate for side effects, if needed. I hate macros that don't
5173 do that. */
5174 if (TREE_SIDE_EFFECTS (valist))
5175 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5177 return const0_rtx;
5180 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5181 builtin rather than just as an assignment in stdarg.h because of the
5182 nastiness of array-type va_list types. */
5184 static rtx
5185 expand_builtin_va_copy (tree exp)
5187 tree dst, src, t;
5188 location_t loc = EXPR_LOCATION (exp);
5190 dst = CALL_EXPR_ARG (exp, 0);
5191 src = CALL_EXPR_ARG (exp, 1);
5193 dst = stabilize_va_list_loc (loc, dst, 1);
5194 src = stabilize_va_list_loc (loc, src, 0);
5196 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5198 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5200 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5201 TREE_SIDE_EFFECTS (t) = 1;
5202 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5204 else
5206 rtx dstb, srcb, size;
5208 /* Evaluate to pointers. */
5209 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5210 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5211 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5212 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5214 dstb = convert_memory_address (Pmode, dstb);
5215 srcb = convert_memory_address (Pmode, srcb);
5217 /* "Dereference" to BLKmode memories. */
5218 dstb = gen_rtx_MEM (BLKmode, dstb);
5219 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5220 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5221 srcb = gen_rtx_MEM (BLKmode, srcb);
5222 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5223 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5225 /* Copy. */
5226 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5229 return const0_rtx;
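/* Illustrative usage sketch (not part of this file): the user-level stdarg
   pattern whose va_start, va_copy and va_end calls are handled by the
   expanders above.  The sum_ints helper is hypothetical.  */
#if 0
#include <stdarg.h>

static int
sum_ints (int count, ...)
{
  va_list ap, ap2;
  int total = 0;

  va_start (ap, count);          /* expand_builtin_va_start */
  va_copy (ap2, ap);             /* expand_builtin_va_copy */
  for (int i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap2);
  va_end (ap);                   /* expand_builtin_va_end */
  return total;
}
#endif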
5232 /* Expand a call to one of the builtin functions __builtin_frame_address or
5233 __builtin_return_address. */
5235 static rtx
5236 expand_builtin_frame_address (tree fndecl, tree exp)
5238 /* The argument must be a nonnegative integer constant.
5239 It counts the number of frames to scan up the stack.
5240 The value is either the frame pointer value or the return
5241 address saved in that frame. */
5242 if (call_expr_nargs (exp) == 0)
5243 /* Warning about missing arg was already issued. */
5244 return const0_rtx;
5245 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5247 error ("invalid argument to %qD", fndecl);
5248 return const0_rtx;
5250 else
5252 /* Number of frames to scan up the stack. */
5253 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5255 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5257 /* Some ports cannot access arbitrary stack frames. */
5258 if (tem == NULL)
5260 warning (0, "unsupported argument to %qD", fndecl);
5261 return const0_rtx;
5264 if (count)
5266 /* Warn since no effort is made to ensure that any frame
5267 beyond the current one exists or can be safely reached. */
5268 warning (OPT_Wframe_address, "calling %qD with "
5269 "a nonzero argument is unsafe", fndecl);
5272 /* For __builtin_frame_address, return what we've got. */
5273 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5274 return tem;
5276 if (!REG_P (tem)
5277 && ! CONSTANT_P (tem))
5278 tem = copy_addr_to_reg (tem);
5279 return tem;
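/* Illustrative usage sketch (not part of this file): the argument must be a
   constant, and any nonzero level triggers the -Wframe-address warning
   issued above.  log_caller is hypothetical user code.  */
#if 0
#include <stdio.h>

void
log_caller (void)
{
  void *frame = __builtin_frame_address (0);   /* this frame's address */
  void *ret = __builtin_return_address (0);    /* caller's return address */
  printf ("frame=%p return=%p\n", frame, ret);
}
#endif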
5283 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5284 failed and the caller should emit a normal call. */
5286 static rtx
5287 expand_builtin_alloca (tree exp)
5289 rtx op0;
5290 rtx result;
5291 unsigned int align;
5292 tree fndecl = get_callee_fndecl (exp);
5293 HOST_WIDE_INT max_size;
5294 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5295 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5296 bool valid_arglist
5297 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5298 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5299 VOID_TYPE)
5300 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5301 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5302 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5304 if (!valid_arglist)
5305 return NULL_RTX;
5307 /* Compute the argument. */
5308 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5310 /* Compute the alignment. */
5311 align = (fcode == BUILT_IN_ALLOCA
5312 ? BIGGEST_ALIGNMENT
5313 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5315 /* Compute the maximum size. */
5316 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5317 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5318 : -1);
5320 /* Allocate the desired space. If the allocation stems from the declaration
5321 of a variable-sized object, it cannot accumulate. */
5322 result
5323 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5324 result = convert_memory_address (ptr_mode, result);
5326 /* Dynamic allocations for variables are recorded during gimplification. */
5327 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5328 record_dynamic_alloc (exp);
5330 return result;
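/* Illustrative usage sketch (not part of this file): the alloca variants
   dispatched above.  This assumes the documented GCC extension in which the
   alignment argument of __builtin_alloca_with_align is given in bits.  */
#if 0
#include <stddef.h>

void
use_scratch (size_t n)
{
  char *buf = __builtin_alloca (n);                        /* BUILT_IN_ALLOCA */
  int *vec = __builtin_alloca_with_align (n * sizeof (int),
                                          128);            /* 16-byte aligned */
  /* ... use buf and vec; both are freed when the function returns ... */
  (void) buf;
  (void) vec;
}
#endif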
5333 /* Emit a call to __asan_allocas_unpoison in EXP. Add
5334 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5335 STACK_DYNAMIC_OFFSET value, to the second argument of the call. See the
5336 motivation for this in the comment for the handle_builtin_stack_restore function. */
5338 static rtx
5339 expand_asan_emit_allocas_unpoison (tree exp)
5341 tree arg0 = CALL_EXPR_ARG (exp, 0);
5342 tree arg1 = CALL_EXPR_ARG (exp, 1);
5343 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5344 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5345 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5346 stack_pointer_rtx, NULL_RTX, 0,
5347 OPTAB_LIB_WIDEN);
5348 off = convert_modes (ptr_mode, Pmode, off, 0);
5349 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5350 OPTAB_LIB_WIDEN);
5351 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5352 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5353 top, ptr_mode, bot, ptr_mode);
5354 return ret;
5357 /* Expand a call to bswap builtin in EXP.
5358 Return NULL_RTX if a normal call should be emitted rather than expanding the
5359 function in-line. If convenient, the result should be placed in TARGET.
5360 SUBTARGET may be used as the target for computing one of EXP's operands. */
5362 static rtx
5363 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5364 rtx subtarget)
5366 tree arg;
5367 rtx op0;
5369 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5370 return NULL_RTX;
5372 arg = CALL_EXPR_ARG (exp, 0);
5373 op0 = expand_expr (arg,
5374 subtarget && GET_MODE (subtarget) == target_mode
5375 ? subtarget : NULL_RTX,
5376 target_mode, EXPAND_NORMAL);
5377 if (GET_MODE (op0) != target_mode)
5378 op0 = convert_to_mode (target_mode, op0, 1);
5380 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5382 gcc_assert (target);
5384 return convert_to_mode (target_mode, target, 1);
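/* Illustrative usage sketch (not part of this file): a byte-swap builtin
   that reaches expand_builtin_bswap, e.g. when converting endianness on a
   little-endian host.  */
#if 0
#include <stdint.h>

uint32_t
to_big_endian32 (uint32_t host)
{
  return __builtin_bswap32 (host);   /* single bswap insn where available */
}
#endif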
5387 /* Expand a call to a unary builtin in EXP.
5388 Return NULL_RTX if a normal call should be emitted rather than expanding the
5389 function in-line. If convenient, the result should be placed in TARGET.
5390 SUBTARGET may be used as the target for computing one of EXP's operands. */
5392 static rtx
5393 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5394 rtx subtarget, optab op_optab)
5396 rtx op0;
5398 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5399 return NULL_RTX;
5401 /* Compute the argument. */
5402 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5403 (subtarget
5404 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5405 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5406 VOIDmode, EXPAND_NORMAL);
5407 /* Compute op, into TARGET if possible.
5408 Set TARGET to wherever the result comes back. */
5409 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5410 op_optab, op0, target, op_optab != clrsb_optab);
5411 gcc_assert (target);
5413 return convert_to_mode (target_mode, target, 0);
5416 /* Expand a call to __builtin_expect. We just return our argument
5417 as the builtin_expect semantic should've been already executed by
5418 the tree branch prediction pass. */
5420 static rtx
5421 expand_builtin_expect (tree exp, rtx target)
5423 tree arg;
5425 if (call_expr_nargs (exp) < 2)
5426 return const0_rtx;
5427 arg = CALL_EXPR_ARG (exp, 0);
5429 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5430 /* When guessing was done, the hints should be already stripped away. */
5431 gcc_assert (!flag_guess_branch_prob
5432 || optimize == 0 || seen_error ());
5433 return target;
5436 /* Expand a call to __builtin_expect_with_probability. We just return our
5437 argument as the builtin_expect semantic should've been already executed by
5438 the tree branch prediction pass. */
5440 static rtx
5441 expand_builtin_expect_with_probability (tree exp, rtx target)
5443 tree arg;
5445 if (call_expr_nargs (exp) < 3)
5446 return const0_rtx;
5447 arg = CALL_EXPR_ARG (exp, 0);
5449 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5450 /* When guessing was done, the hints should be already stripped away. */
5451 gcc_assert (!flag_guess_branch_prob
5452 || optimize == 0 || seen_error ());
5453 return target;
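/* Illustrative usage sketch (not part of this file): by the time these
   expanders run, the hints have been consumed by the tree passes, so only
   the first argument survives.  __builtin_expect_with_probability is the
   newer GCC variant; availability should be checked before relying on it.  */
#if 0
int
process (int *p)
{
  if (__builtin_expect (p == 0, 0))                          /* unlikely */
    return -1;
  if (__builtin_expect_with_probability (*p > 0, 1, 0.9))    /* 90% likely */
    return *p;
  return 0;
}
#endif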
5457 /* Expand a call to __builtin_assume_aligned. We just return our first
5458 argument as the builtin_assume_aligned semantic should've been already
5459 executed by CCP. */
5461 static rtx
5462 expand_builtin_assume_aligned (tree exp, rtx target)
5464 if (call_expr_nargs (exp) < 2)
5465 return const0_rtx;
5466 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5467 EXPAND_NORMAL);
5468 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5469 && (call_expr_nargs (exp) < 3
5470 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5471 return target;
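/* Illustrative usage sketch (not part of this file): the alignment promise
   is consumed earlier by CCP; the expander above simply returns its first
   argument.  sum16 is hypothetical user code.  */
#if 0
float
sum16 (const float *p)
{
  const float *ap = __builtin_assume_aligned (p, 64);   /* promise: 64-byte */
  float s = 0.0f;
  for (int i = 0; i < 16; i++)
    s += ap[i];
  return s;
}
#endif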
5474 void
5475 expand_builtin_trap (void)
5477 if (targetm.have_trap ())
5479 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5480 /* For trap insns when not accumulating outgoing args force
5481 REG_ARGS_SIZE note to prevent crossjumping of calls with
5482 different args sizes. */
5483 if (!ACCUMULATE_OUTGOING_ARGS)
5484 add_args_size_note (insn, stack_pointer_delta);
5486 else
5488 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5489 tree call_expr = build_call_expr (fn, 0);
5490 expand_call (call_expr, NULL_RTX, false);
5493 emit_barrier ();
5496 /* Expand a call to __builtin_unreachable. We do nothing except emit
5497 a barrier saying that control flow will not pass here.
5499 It is the responsibility of the program being compiled to ensure
5500 that control flow never reaches __builtin_unreachable. */
5501 static void
5502 expand_builtin_unreachable (void)
5504 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5505 to avoid this. */
5506 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5507 emit_barrier ();
5510 /* Expand EXP, a call to fabs, fabsf or fabsl.
5511 Return NULL_RTX if a normal call should be emitted rather than expanding
5512 the function inline. If convenient, the result should be placed
5513 in TARGET. SUBTARGET may be used as the target for computing
5514 the operand. */
5516 static rtx
5517 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5519 machine_mode mode;
5520 tree arg;
5521 rtx op0;
5523 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5524 return NULL_RTX;
5526 arg = CALL_EXPR_ARG (exp, 0);
5527 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5528 mode = TYPE_MODE (TREE_TYPE (arg));
5529 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5530 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5533 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5534 Return NULL if a normal call should be emitted rather than expanding the
5535 function inline. If convenient, the result should be placed in TARGET.
5536 SUBTARGET may be used as the target for computing the operand. */
5538 static rtx
5539 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5541 rtx op0, op1;
5542 tree arg;
5544 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5545 return NULL_RTX;
5547 arg = CALL_EXPR_ARG (exp, 0);
5548 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5550 arg = CALL_EXPR_ARG (exp, 1);
5551 op1 = expand_normal (arg);
5553 return expand_copysign (op0, op1, target);
5556 /* Emit a call to __builtin___clear_cache. */
5558 void
5559 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5561 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5562 BUILTIN_ASM_NAME_PTR
5563 (BUILT_IN_CLEAR_CACHE));
5565 emit_library_call (callee,
5566 LCT_NORMAL, VOIDmode,
5567 convert_memory_address (ptr_mode, begin), ptr_mode,
5568 convert_memory_address (ptr_mode, end), ptr_mode);
5571 /* Emit a call to __builtin___clear_cache, unless the target specifies
5572 it as do-nothing. This function can be used by trampoline
5573 finalizers to duplicate the effects of expanding a call to the
5574 clear_cache builtin. */
5576 void
5577 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5579 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5580 || CONST_INT_P (begin))
5581 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5582 || CONST_INT_P (end)));
5584 if (targetm.have_clear_cache ())
5586 /* We have a "clear_cache" insn, and it will handle everything. */
5587 class expand_operand ops[2];
5589 create_address_operand (&ops[0], begin);
5590 create_address_operand (&ops[1], end);
5592 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5593 return;
5595 else
5597 #ifndef CLEAR_INSN_CACHE
5598 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5599 does nothing. There is no need to call it. Do nothing. */
5600 return;
5601 #endif /* CLEAR_INSN_CACHE */
5604 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5607 /* Expand a call to __builtin___clear_cache. */
5609 static void
5610 expand_builtin___clear_cache (tree exp)
5612 tree begin, end;
5613 rtx begin_rtx, end_rtx;
5615 /* We must not expand to a library call. If we did, any
5616 fallback library function in libgcc that might contain a call to
5617 __builtin___clear_cache() would recurse infinitely. */
5618 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5620 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5621 return;
5624 begin = CALL_EXPR_ARG (exp, 0);
5625 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5627 end = CALL_EXPR_ARG (exp, 1);
5628 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5630 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
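/* Illustrative usage sketch (not part of this file): flushing the
   instruction cache after emitting code at run time, which the expander
   above handles inline precisely so that libgcc's fallback is not entered
   recursively.  finish_jit_buffer is hypothetical.  */
#if 0
#include <stddef.h>

void
finish_jit_buffer (char *buf, size_t len)
{
  /* ... machine code has been written into buf ... */
  __builtin___clear_cache (buf, buf + len);
}
#endif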
5633 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5635 static rtx
5636 round_trampoline_addr (rtx tramp)
5638 rtx temp, addend, mask;
5640 /* If we don't need too much alignment, we'll have been guaranteed
5641 proper alignment by get_trampoline_type. */
5642 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5643 return tramp;
5645 /* Round address up to desired boundary. */
5646 temp = gen_reg_rtx (Pmode);
5647 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5648 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5650 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5651 temp, 0, OPTAB_LIB_WIDEN);
5652 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5653 temp, 0, OPTAB_LIB_WIDEN);
5655 return tramp;
5658 static rtx
5659 expand_builtin_init_trampoline (tree exp, bool onstack)
5661 tree t_tramp, t_func, t_chain;
5662 rtx m_tramp, r_tramp, r_chain, tmp;
5664 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5665 POINTER_TYPE, VOID_TYPE))
5666 return NULL_RTX;
5668 t_tramp = CALL_EXPR_ARG (exp, 0);
5669 t_func = CALL_EXPR_ARG (exp, 1);
5670 t_chain = CALL_EXPR_ARG (exp, 2);
5672 r_tramp = expand_normal (t_tramp);
5673 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5674 MEM_NOTRAP_P (m_tramp) = 1;
5676 /* If ONSTACK, the TRAMP argument should be the address of a field
5677 within the local function's FRAME decl. Either way, let's see if
5678 we can fill in the MEM_ATTRs for this memory. */
5679 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5680 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5682 /* Creator of a heap trampoline is responsible for making sure the
5683 address is aligned to at least STACK_BOUNDARY. Normally malloc
5684 will ensure this anyhow. */
5685 tmp = round_trampoline_addr (r_tramp);
5686 if (tmp != r_tramp)
5688 m_tramp = change_address (m_tramp, BLKmode, tmp);
5689 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5690 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5693 /* The FUNC argument should be the address of the nested function.
5694 Extract the actual function decl to pass to the hook. */
5695 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5696 t_func = TREE_OPERAND (t_func, 0);
5697 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5699 r_chain = expand_normal (t_chain);
5701 /* Generate insns to initialize the trampoline. */
5702 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5704 if (onstack)
5706 trampolines_created = 1;
5708 if (targetm.calls.custom_function_descriptors != 0)
5709 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5710 "trampoline generated for nested function %qD", t_func);
5713 return const0_rtx;
5716 static rtx
5717 expand_builtin_adjust_trampoline (tree exp)
5719 rtx tramp;
5721 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5722 return NULL_RTX;
5724 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5725 tramp = round_trampoline_addr (tramp);
5726 if (targetm.calls.trampoline_adjust_address)
5727 tramp = targetm.calls.trampoline_adjust_address (tramp);
5729 return tramp;
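/* Illustrative usage sketch (not part of this file): taking the address of
   a nested function (a GNU C extension) is what makes the trampoline
   init/adjust builtins above appear, and with -Wtrampolines the warning
   emitted above fires for add_bias.  */
#if 0
int
apply (int (*fn) (int), int x)
{
  return fn (x);
}

int
outer (int bias)
{
  int add_bias (int v) { return v + bias; }   /* nested function */
  return apply (add_bias, 40);                /* address taken: trampoline */
}
#endif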
5732 /* Expand a call to the builtin descriptor initialization routine.
5733 A descriptor is made up of a couple of pointers to the static
5734 chain and the code entry in this order. */
5736 static rtx
5737 expand_builtin_init_descriptor (tree exp)
5739 tree t_descr, t_func, t_chain;
5740 rtx m_descr, r_descr, r_func, r_chain;
5742 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5743 VOID_TYPE))
5744 return NULL_RTX;
5746 t_descr = CALL_EXPR_ARG (exp, 0);
5747 t_func = CALL_EXPR_ARG (exp, 1);
5748 t_chain = CALL_EXPR_ARG (exp, 2);
5750 r_descr = expand_normal (t_descr);
5751 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5752 MEM_NOTRAP_P (m_descr) = 1;
5753 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5755 r_func = expand_normal (t_func);
5756 r_chain = expand_normal (t_chain);
5758 /* Generate insns to initialize the descriptor. */
5759 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5760 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5761 POINTER_SIZE / BITS_PER_UNIT), r_func);
5763 return const0_rtx;
5766 /* Expand a call to the builtin descriptor adjustment routine. */
5768 static rtx
5769 expand_builtin_adjust_descriptor (tree exp)
5771 rtx tramp;
5773 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5774 return NULL_RTX;
5776 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5778 /* Unalign the descriptor to allow runtime identification. */
5779 tramp = plus_constant (ptr_mode, tramp,
5780 targetm.calls.custom_function_descriptors);
5782 return force_operand (tramp, NULL_RTX);
5785 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5786 function. The function first checks whether the back end provides
5787 an insn to implement signbit for the respective mode. If not, it
5788 checks whether the floating point format of the value is such that
5789 the sign bit can be extracted. If that is not the case, error out.
5790 EXP is the expression that is a call to the builtin function; if
5791 convenient, the result should be placed in TARGET. */
5792 static rtx
5793 expand_builtin_signbit (tree exp, rtx target)
5795 const struct real_format *fmt;
5796 scalar_float_mode fmode;
5797 scalar_int_mode rmode, imode;
5798 tree arg;
5799 int word, bitpos;
5800 enum insn_code icode;
5801 rtx temp;
5802 location_t loc = EXPR_LOCATION (exp);
5804 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5805 return NULL_RTX;
5807 arg = CALL_EXPR_ARG (exp, 0);
5808 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5809 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5810 fmt = REAL_MODE_FORMAT (fmode);
5812 arg = builtin_save_expr (arg);
5814 /* Expand the argument yielding an RTX expression. */
5815 temp = expand_normal (arg);
5817 /* Check if the back end provides an insn that handles signbit for the
5818 argument's mode. */
5819 icode = optab_handler (signbit_optab, fmode);
5820 if (icode != CODE_FOR_nothing)
5822 rtx_insn *last = get_last_insn ();
5823 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5824 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
5825 return this_target;
5826 delete_insns_since (last);
5829 /* For floating point formats without a sign bit, implement signbit
5830 as "ARG < 0.0". */
5831 bitpos = fmt->signbit_ro;
5832 if (bitpos < 0)
5834 /* But we can't do this if the format supports signed zero. */
5835 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5837 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5838 build_real (TREE_TYPE (arg), dconst0));
5839 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5842 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5844 imode = int_mode_for_mode (fmode).require ();
5845 temp = gen_lowpart (imode, temp);
5847 else
5849 imode = word_mode;
5850 /* Handle targets with different FP word orders. */
5851 if (FLOAT_WORDS_BIG_ENDIAN)
5852 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5853 else
5854 word = bitpos / BITS_PER_WORD;
5855 temp = operand_subword_force (temp, word, fmode);
5856 bitpos = bitpos % BITS_PER_WORD;
5859 /* Force the intermediate word_mode (or narrower) result into a
5860 register. This avoids attempting to create paradoxical SUBREGs
5861 of floating point modes below. */
5862 temp = force_reg (imode, temp);
5864 /* If the bitpos is within the "result mode" lowpart, the operation
5865 can be implemented with a single bitwise AND. Otherwise, we need
5866 a right shift and an AND. */
5868 if (bitpos < GET_MODE_BITSIZE (rmode))
5870 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5872 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5873 temp = gen_lowpart (rmode, temp);
5874 temp = expand_binop (rmode, and_optab, temp,
5875 immed_wide_int_const (mask, rmode),
5876 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5878 else
5880 /* Perform a logical right shift to place the signbit in the least
5881 significant bit, then truncate the result to the desired mode
5882 and mask just this bit. */
5883 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5884 temp = gen_lowpart (rmode, temp);
5885 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5886 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5889 return temp;
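/* Illustrative usage sketch (not part of this file): signbit distinguishes
   -0.0 from +0.0, which a plain "< 0.0" comparison cannot; that is why the
   comparison fallback above is restricted to formats without a sign bit.  */
#if 0
#include <math.h>

int
is_negative_zero (double x)
{
  return x == 0.0 && signbit (x);
}
#endif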
5892 /* Expand fork or exec calls. TARGET is the desired target of the
5893 call. EXP is the call. FN is the
5894 identifier of the actual function. IGNORE is nonzero if the
5895 value is to be ignored. */
5897 static rtx
5898 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5900 tree id, decl;
5901 tree call;
5903 /* If we are not profiling, just call the function. */
5904 if (!profile_arc_flag)
5905 return NULL_RTX;
5907 /* Otherwise call the wrapper. This should be equivalent for the rest of
5908 compiler, so the code does not diverge, and the wrapper may run the
5909 code necessary for keeping the profiling sane. */
5911 switch (DECL_FUNCTION_CODE (fn))
5913 case BUILT_IN_FORK:
5914 id = get_identifier ("__gcov_fork");
5915 break;
5917 case BUILT_IN_EXECL:
5918 id = get_identifier ("__gcov_execl");
5919 break;
5921 case BUILT_IN_EXECV:
5922 id = get_identifier ("__gcov_execv");
5923 break;
5925 case BUILT_IN_EXECLP:
5926 id = get_identifier ("__gcov_execlp");
5927 break;
5929 case BUILT_IN_EXECLE:
5930 id = get_identifier ("__gcov_execle");
5931 break;
5933 case BUILT_IN_EXECVP:
5934 id = get_identifier ("__gcov_execvp");
5935 break;
5937 case BUILT_IN_EXECVE:
5938 id = get_identifier ("__gcov_execve");
5939 break;
5941 default:
5942 gcc_unreachable ();
5945 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5946 FUNCTION_DECL, id, TREE_TYPE (fn));
5947 DECL_EXTERNAL (decl) = 1;
5948 TREE_PUBLIC (decl) = 1;
5949 DECL_ARTIFICIAL (decl) = 1;
5950 TREE_NOTHROW (decl) = 1;
5951 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5952 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5953 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5954 return expand_call (call, target, ignore);
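/* Illustrative usage sketch (not part of this file): when compiled with
   -fprofile-arcs, a plain fork in user code is rerouted by the expander
   above to the __gcov_fork wrapper so the child's profile counters stay
   consistent.  spawn is hypothetical.  */
#if 0
#include <sys/types.h>
#include <unistd.h>

int
spawn (void)
{
  pid_t pid = fork ();   /* emitted as a call to __gcov_fork */
  return pid == 0 ? 0 : 1;
}
#endif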
5959 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5960 the pointer in these functions is void*, the tree optimizers may remove
5961 casts. The mode computed in expand_builtin isn't reliable either, due
5962 to __sync_bool_compare_and_swap.
5964 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5965 group of builtins. This gives us log2 of the mode size. */
5967 static inline machine_mode
5968 get_builtin_sync_mode (int fcode_diff)
5970 /* The size is not negotiable, so ask not to get BLKmode in return
5971 if the target indicates that a smaller size would be better. */
5972 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5975 /* Expand the memory expression LOC and return the appropriate memory operand
5976 for the builtin_sync operations. */
5978 static rtx
5979 get_builtin_sync_mem (tree loc, machine_mode mode)
5981 rtx addr, mem;
5982 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5983 ? TREE_TYPE (TREE_TYPE (loc))
5984 : TREE_TYPE (loc));
5985 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5987 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5988 addr = convert_memory_address (addr_mode, addr);
5990 /* Note that we explicitly do not want any alias information for this
5991 memory, so that we kill all other live memories. Otherwise we don't
5992 satisfy the full barrier semantics of the intrinsic. */
5993 mem = gen_rtx_MEM (mode, addr);
5995 set_mem_addr_space (mem, addr_space);
5997 mem = validize_mem (mem);
5999 /* The alignment needs to be at least according to that of the mode. */
6000 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6001 get_pointer_alignment (loc)));
6002 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6003 MEM_VOLATILE_P (mem) = 1;
6005 return mem;
6008 /* Make sure an argument is in the right mode.
6009 EXP is the tree argument.
6010 MODE is the mode it should be in. */
6012 static rtx
6013 expand_expr_force_mode (tree exp, machine_mode mode)
6015 rtx val;
6016 machine_mode old_mode;
6018 if (TREE_CODE (exp) == SSA_NAME
6019 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6021 /* Undo argument promotion if possible, as combine might not
6022 be able to do it later due to MEM_VOLATILE_P uses in the
6023 patterns. */
6024 gimple *g = get_gimple_for_ssa_name (exp);
6025 if (g && gimple_assign_cast_p (g))
6027 tree rhs = gimple_assign_rhs1 (g);
6028 tree_code code = gimple_assign_rhs_code (g);
6029 if (CONVERT_EXPR_CODE_P (code)
6030 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6031 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6032 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6033 && (TYPE_PRECISION (TREE_TYPE (exp))
6034 > TYPE_PRECISION (TREE_TYPE (rhs))))
6035 exp = rhs;
6039 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6040 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6041 of CONST_INTs, where we know the old_mode only from the call argument. */
6043 old_mode = GET_MODE (val);
6044 if (old_mode == VOIDmode)
6045 old_mode = TYPE_MODE (TREE_TYPE (exp));
6046 val = convert_modes (mode, old_mode, val, 1);
6047 return val;
6051 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6052 EXP is the CALL_EXPR. CODE is the rtx code
6053 that corresponds to the arithmetic or logical operation from the name;
6054 an exception here is that NOT actually means NAND. TARGET is an optional
6055 place for us to store the results; AFTER is true if this is the
6056 fetch_and_xxx form. */
6058 static rtx
6059 expand_builtin_sync_operation (machine_mode mode, tree exp,
6060 enum rtx_code code, bool after,
6061 rtx target)
6063 rtx val, mem;
6064 location_t loc = EXPR_LOCATION (exp);
6066 if (code == NOT && warn_sync_nand)
6068 tree fndecl = get_callee_fndecl (exp);
6069 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6071 static bool warned_f_a_n, warned_n_a_f;
6073 switch (fcode)
6075 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6076 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6077 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6078 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6079 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6080 if (warned_f_a_n)
6081 break;
6083 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6084 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6085 warned_f_a_n = true;
6086 break;
6088 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6089 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6090 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6091 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6092 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6093 if (warned_n_a_f)
6094 break;
6096 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6097 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6098 warned_n_a_f = true;
6099 break;
6101 default:
6102 gcc_unreachable ();
6106 /* Expand the operands. */
6107 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6108 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6110 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6111 after);
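/* Illustrative usage sketch (not part of this file): a legacy __sync fetch
   builtin handled above.  As the warning notes, __sync_fetch_and_nand has
   computed ~(*ptr & val) since GCC 4.4.  */
#if 0
int
bump (int *counter)
{
  return __sync_fetch_and_add (counter, 1);   /* returns the old value */
}
#endif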
6114 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6115 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6116 true if this is the boolean form. TARGET is a place for us to store the
6117 results; this is NOT optional if IS_BOOL is true. */
6119 static rtx
6120 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6121 bool is_bool, rtx target)
6123 rtx old_val, new_val, mem;
6124 rtx *pbool, *poval;
6126 /* Expand the operands. */
6127 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6128 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6129 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6131 pbool = poval = NULL;
6132 if (target != const0_rtx)
6134 if (is_bool)
6135 pbool = &target;
6136 else
6137 poval = &target;
6139 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6140 false, MEMMODEL_SYNC_SEQ_CST,
6141 MEMMODEL_SYNC_SEQ_CST))
6142 return NULL_RTX;
6144 return target;
6147 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6148 general form is actually an atomic exchange, and some targets only
6149 support a reduced form with the second argument being a constant 1.
6150 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6151 the results. */
6153 static rtx
6154 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6155 rtx target)
6157 rtx val, mem;
6159 /* Expand the operands. */
6160 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6161 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6163 return expand_sync_lock_test_and_set (target, mem, val);
6166 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6168 static void
6169 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6171 rtx mem;
6173 /* Expand the operands. */
6174 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6176 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6179 /* Given an integer representing an ``enum memmodel'', verify its
6180 correctness and return the memory model enum. */
6182 static enum memmodel
6183 get_memmodel (tree exp)
6185 /* If the parameter is not a constant, it's a run time value so we'll just
6186 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6187 if (TREE_CODE (exp) != INTEGER_CST)
6188 return MEMMODEL_SEQ_CST;
6190 rtx op = expand_normal (exp);
6192 unsigned HOST_WIDE_INT val = INTVAL (op);
6193 if (targetm.memmodel_check)
6194 val = targetm.memmodel_check (val);
6195 else if (val & ~MEMMODEL_MASK)
6196 return MEMMODEL_SEQ_CST;
6198 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
6199 if (memmodel_base (val) >= MEMMODEL_LAST)
6200 return MEMMODEL_SEQ_CST;
6202 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6203 be conservative and promote consume to acquire. */
6204 if (val == MEMMODEL_CONSUME)
6205 val = MEMMODEL_ACQUIRE;
6207 return (enum memmodel) val;
6210 /* Expand the __atomic_exchange intrinsic:
6211 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6212 EXP is the CALL_EXPR.
6213 TARGET is an optional place for us to store the results. */
6215 static rtx
6216 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6218 rtx val, mem;
6219 enum memmodel model;
6221 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6223 if (!flag_inline_atomics)
6224 return NULL_RTX;
6226 /* Expand the operands. */
6227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6230 return expand_atomic_exchange (target, mem, val, model);
6233 /* Expand the __atomic_compare_exchange intrinsic:
6234 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6235 TYPE desired, BOOL weak,
6236 enum memmodel success,
6237 enum memmodel failure)
6238 EXP is the CALL_EXPR.
6239 TARGET is an optional place for us to store the results. */
6241 static rtx
6242 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6243 rtx target)
6245 rtx expect, desired, mem, oldval;
6246 rtx_code_label *label;
6247 tree weak;
6248 bool is_weak;
6250 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6251 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6253 if (failure > success)
6254 success = MEMMODEL_SEQ_CST;
6256 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6258 failure = MEMMODEL_SEQ_CST;
6259 success = MEMMODEL_SEQ_CST;
6263 if (!flag_inline_atomics)
6264 return NULL_RTX;
6266 /* Expand the operands. */
6267 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6269 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6270 expect = convert_memory_address (Pmode, expect);
6271 expect = gen_rtx_MEM (mode, expect);
6272 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6274 weak = CALL_EXPR_ARG (exp, 3);
6275 is_weak = false;
6276 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6277 is_weak = true;
6279 if (target == const0_rtx)
6280 target = NULL;
6282 /* Lest the rtl backend create a race condition with an improper store
6283 to memory, always create a new pseudo for OLDVAL. */
6284 oldval = NULL;
6286 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6287 is_weak, success, failure))
6288 return NULL_RTX;
6290 /* Conditionally store back to EXPECT, lest we create a race condition
6291 with an improper store to memory. */
6292 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6293 the normal case where EXPECT is totally private, i.e. a register. At
6294 which point the store can be unconditional. */
6295 label = gen_label_rtx ();
6296 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6297 GET_MODE (target), 1, label);
6298 emit_move_insn (expect, oldval);
6299 emit_label (label);
6301 return target;
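/* Illustrative usage sketch (not part of this file): a typical
   compare-exchange loop.  On failure the builtin writes the value it
   observed back into "expected", which is the conditional store emitted
   above.  atomic_max is hypothetical.  */
#if 0
void
atomic_max (int *p, int v)
{
  int old = __atomic_load_n (p, __ATOMIC_RELAXED);
  while (old < v
         && !__atomic_compare_exchange_n (p, &old, v, /*weak=*/1,
                                          __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    ;   /* "old" now holds the freshly observed value; retry */
}
#endif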
6304 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6305 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6306 call. The weak parameter must be dropped to match the expected parameter
6307 list and the expected argument changed from value to pointer to memory
6308 slot. */
6310 static void
6311 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6313 unsigned int z;
6314 vec<tree, va_gc> *vec;
6316 vec_alloc (vec, 5);
6317 vec->quick_push (gimple_call_arg (call, 0));
6318 tree expected = gimple_call_arg (call, 1);
6319 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6320 TREE_TYPE (expected));
6321 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6322 if (expd != x)
6323 emit_move_insn (x, expd);
6324 tree v = make_tree (TREE_TYPE (expected), x);
6325 vec->quick_push (build1 (ADDR_EXPR,
6326 build_pointer_type (TREE_TYPE (expected)), v));
6327 vec->quick_push (gimple_call_arg (call, 2));
6328 /* Skip the boolean weak parameter. */
6329 for (z = 4; z < 6; z++)
6330 vec->quick_push (gimple_call_arg (call, z));
6331 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6332 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6333 gcc_assert (bytes_log2 < 5);
6334 built_in_function fncode
6335 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6336 + bytes_log2);
6337 tree fndecl = builtin_decl_explicit (fncode);
6338 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6339 fndecl);
6340 tree exp = build_call_vec (boolean_type_node, fn, vec);
6341 tree lhs = gimple_call_lhs (call);
6342 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6343 if (lhs)
6345 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6346 if (GET_MODE (boolret) != mode)
6347 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6348 x = force_reg (mode, x);
6349 write_complex_part (target, boolret, true, true);
6350 write_complex_part (target, x, false, false);
6354 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6356 void
6357 expand_ifn_atomic_compare_exchange (gcall *call)
6359 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6360 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6361 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6363 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6364 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6366 if (failure > success)
6367 success = MEMMODEL_SEQ_CST;
6369 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6371 failure = MEMMODEL_SEQ_CST;
6372 success = MEMMODEL_SEQ_CST;
6375 if (!flag_inline_atomics)
6377 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6378 return;
6381 /* Expand the operands. */
6382 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6384 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6385 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6387 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6389 rtx boolret = NULL;
6390 rtx oldval = NULL;
6392 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6393 is_weak, success, failure))
6395 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6396 return;
6399 tree lhs = gimple_call_lhs (call);
6400 if (lhs)
6402 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6403 if (GET_MODE (boolret) != mode)
6404 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6405 write_complex_part (target, boolret, true, true);
6406 write_complex_part (target, oldval, false, false);
6410 /* Expand the __atomic_load intrinsic:
6411 TYPE __atomic_load (TYPE *object, enum memmodel)
6412 EXP is the CALL_EXPR.
6413 TARGET is an optional place for us to store the results. */
6415 static rtx
6416 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6418 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6419 if (is_mm_release (model) || is_mm_acq_rel (model))
6420 model = MEMMODEL_SEQ_CST;
6422 if (!flag_inline_atomics)
6423 return NULL_RTX;
6425 /* Expand the operand. */
6426 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6428 return expand_atomic_load (target, mem, model);
6432 /* Expand the __atomic_store intrinsic:
6433 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6434 EXP is the CALL_EXPR.
6435 TARGET is an optional place for us to store the results. */
6437 static rtx
6438 expand_builtin_atomic_store (machine_mode mode, tree exp)
6440 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6441 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6442 || is_mm_release (model)))
6443 model = MEMMODEL_SEQ_CST;
6445 if (!flag_inline_atomics)
6446 return NULL_RTX;
6448 /* Expand the operands. */
6449 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6450 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6452 return expand_atomic_store (mem, val, model, false);
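/* Illustrative usage sketch (not part of this file): the load and store
   builtins expanded above.  An invalid order (e.g. release on a load) is
   silently promoted to seq_cst by the code above.  */
#if 0
void
publish (int *flag, int *data, int value)
{
  *data = value;
  __atomic_store_n (flag, 1, __ATOMIC_RELEASE);
}

int
wait_ready (int *flag)
{
  return __atomic_load_n (flag, __ATOMIC_ACQUIRE);
}
#endif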
6455 /* Expand the __atomic_fetch_XXX intrinsic:
6456 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6457 EXP is the CALL_EXPR.
6458 TARGET is an optional place for us to store the results.
6459 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6460 FETCH_AFTER is true if returning the result of the operation.
6461 FETCH_AFTER is false if returning the value before the operation.
6462 IGNORE is true if the result is not used.
6463 EXT_CALL is the correct builtin for an external call if this cannot be
6464 resolved to an instruction sequence. */
6466 static rtx
6467 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6468 enum rtx_code code, bool fetch_after,
6469 bool ignore, enum built_in_function ext_call)
6471 rtx val, mem, ret;
6472 enum memmodel model;
6473 tree fndecl;
6474 tree addr;
6476 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6478 /* Expand the operands. */
6479 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6480 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6482 /* Only try generating instructions if inlining is turned on. */
6483 if (flag_inline_atomics)
6485 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6486 if (ret)
6487 return ret;
6490 /* Return if a different routine isn't needed for the library call. */
6491 if (ext_call == BUILT_IN_NONE)
6492 return NULL_RTX;
6494 /* Change the call to the specified function. */
6495 fndecl = get_callee_fndecl (exp);
6496 addr = CALL_EXPR_FN (exp);
6497 STRIP_NOPS (addr);
6499 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6500 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6502 /* If we will emit code after the call, the call cannot be a tail call.
6503 If it is emitted as a tail call, a barrier is emitted after it, and
6504 then all trailing code is removed. */
6505 if (!ignore)
6506 CALL_EXPR_TAILCALL (exp) = 0;
6508 /* Expand the call here so we can emit trailing code. */
6509 ret = expand_call (exp, target, ignore);
6511 /* Replace the original function just in case it matters. */
6512 TREE_OPERAND (addr, 0) = fndecl;
6514 /* Then issue the arithmetic correction to return the right result. */
6515 if (!ignore)
6517 if (code == NOT)
6519 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6520 OPTAB_LIB_WIDEN);
6521 ret = expand_simple_unop (mode, NOT, ret, target, true);
6523 else
6524 ret = expand_simple_binop (mode, code, ret, val, target, true,
6525 OPTAB_LIB_WIDEN);
6527 return ret;
6530 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6532 void
6533 expand_ifn_atomic_bit_test_and (gcall *call)
6535 tree ptr = gimple_call_arg (call, 0);
6536 tree bit = gimple_call_arg (call, 1);
6537 tree flag = gimple_call_arg (call, 2);
6538 tree lhs = gimple_call_lhs (call);
6539 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6540 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6541 enum rtx_code code;
6542 optab optab;
6543 class expand_operand ops[5];
6545 gcc_assert (flag_inline_atomics);
6547 if (gimple_call_num_args (call) == 5)
6548 model = get_memmodel (gimple_call_arg (call, 3));
6550 rtx mem = get_builtin_sync_mem (ptr, mode);
6551 rtx val = expand_expr_force_mode (bit, mode);
6553 switch (gimple_call_internal_fn (call))
6555 case IFN_ATOMIC_BIT_TEST_AND_SET:
6556 code = IOR;
6557 optab = atomic_bit_test_and_set_optab;
6558 break;
6559 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6560 code = XOR;
6561 optab = atomic_bit_test_and_complement_optab;
6562 break;
6563 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6564 code = AND;
6565 optab = atomic_bit_test_and_reset_optab;
6566 break;
6567 default:
6568 gcc_unreachable ();
6571 if (lhs == NULL_TREE)
6573 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6574 val, NULL_RTX, true, OPTAB_DIRECT);
6575 if (code == AND)
6576 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6577 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
6578 return;
6581 rtx target;
6582 if (lhs)
6583 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6584 else
6585 target = gen_reg_rtx (mode);
6586 enum insn_code icode = direct_optab_handler (optab, mode);
6587 gcc_assert (icode != CODE_FOR_nothing);
6588 create_output_operand (&ops[0], target, mode);
6589 create_fixed_operand (&ops[1], mem);
6590 create_convert_operand_to (&ops[2], val, mode, true);
6591 create_integer_operand (&ops[3], model);
6592 create_integer_operand (&ops[4], integer_onep (flag));
6593 if (maybe_expand_insn (icode, 5, ops))
6594 return;
6596 rtx bitval = val;
6597 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6598 val, NULL_RTX, true, OPTAB_DIRECT);
6599 rtx maskval = val;
6600 if (code == AND)
6601 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6602 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6603 code, model, false);
6604 if (!result)
6606 bool is_atomic = gimple_call_num_args (call) == 5;
6607 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6608 tree fndecl = gimple_call_addr_fndecl (tcall);
6609 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6610 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
6611 make_tree (type, val),
6612 is_atomic
6613 ? gimple_call_arg (call, 3)
6614 : integer_zero_node);
6615 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6616 mode, !lhs);
6618 if (!lhs)
6619 return;
6620 if (integer_onep (flag))
6622 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6623 NULL_RTX, true, OPTAB_DIRECT);
6624 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6625 true, OPTAB_DIRECT);
6627 else
6628 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6629 OPTAB_DIRECT);
6630 if (result != target)
6631 emit_move_insn (target, result);
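/* Illustrative usage sketch (not part of this file): the source-level
   pattern that the gimple passes match into IFN_ATOMIC_BIT_TEST_AND_SET and
   that therefore lands in the expander above.  */
#if 0
int
test_and_set_bit (unsigned *word, unsigned bit)
{
  unsigned mask = 1u << bit;
  return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}
#endif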
6634 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6636 void
6637 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6639 tree cmp = gimple_call_arg (call, 0);
6640 tree ptr = gimple_call_arg (call, 1);
6641 tree arg = gimple_call_arg (call, 2);
6642 tree lhs = gimple_call_lhs (call);
6643 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6644 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6645 optab optab;
6646 rtx_code code;
6647 class expand_operand ops[5];
6649 gcc_assert (flag_inline_atomics);
6651 if (gimple_call_num_args (call) == 5)
6652 model = get_memmodel (gimple_call_arg (call, 3));
6654 rtx mem = get_builtin_sync_mem (ptr, mode);
6655 rtx op = expand_expr_force_mode (arg, mode);
6657 switch (gimple_call_internal_fn (call))
6659 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6660 code = PLUS;
6661 optab = atomic_add_fetch_cmp_0_optab;
6662 break;
6663 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6664 code = MINUS;
6665 optab = atomic_sub_fetch_cmp_0_optab;
6666 break;
6667 case IFN_ATOMIC_AND_FETCH_CMP_0:
6668 code = AND;
6669 optab = atomic_and_fetch_cmp_0_optab;
6670 break;
6671 case IFN_ATOMIC_OR_FETCH_CMP_0:
6672 code = IOR;
6673 optab = atomic_or_fetch_cmp_0_optab;
6674 break;
6675 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6676 code = XOR;
6677 optab = atomic_xor_fetch_cmp_0_optab;
6678 break;
6679 default:
6680 gcc_unreachable ();
6683 enum rtx_code comp = UNKNOWN;
6684 switch (tree_to_uhwi (cmp))
6686 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6687 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6688 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6689 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6690 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6691 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6692 default: gcc_unreachable ();
6695 rtx target;
6696 if (lhs == NULL_TREE)
6697 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6698 else
6699 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6700 enum insn_code icode = direct_optab_handler (optab, mode);
6701 gcc_assert (icode != CODE_FOR_nothing);
6702 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6703 create_fixed_operand (&ops[1], mem);
6704 create_convert_operand_to (&ops[2], op, mode, true);
6705 create_integer_operand (&ops[3], model);
6706 create_integer_operand (&ops[4], comp);
6707 if (maybe_expand_insn (icode, 5, ops))
6708 return;
6710 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6711 code, model, true);
6712 if (!result)
6714 bool is_atomic = gimple_call_num_args (call) == 5;
6715 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6716 tree fndecl = gimple_call_addr_fndecl (tcall);
6717 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6718 tree exp = build_call_nary (type, tcall,
6719 2 + is_atomic, ptr, arg,
6720 is_atomic
6721 ? gimple_call_arg (call, 3)
6722 : integer_zero_node);
6723 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6724 mode, !lhs);
6727 if (lhs)
6729 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6730 0, 1);
6731 if (result != target)
6732 emit_move_insn (target, result);
6736 /* Expand an atomic clear operation.
6737 void _atomic_clear (BOOL *obj, enum memmodel)
6738 EXP is the call expression. */
6740 static rtx
6741 expand_builtin_atomic_clear (tree exp)
6743 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6744 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6745 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6747 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6748 model = MEMMODEL_SEQ_CST;
6750 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6751 Failing that, a store is issued by __atomic_store. The only way this can
6752 fail is if the bool type is larger than a word size. Unlikely, but
6753 handle it anyway for completeness. Assume a single threaded model since
6754 there is no atomic support in this case, and no barriers are required. */
6755 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6756 if (!ret)
6757 emit_move_insn (mem, const0_rtx);
6758 return const0_rtx;
6761 /* Expand an atomic test_and_set operation.
6762 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6763 EXP is the call expression. */
6765 static rtx
6766 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6768 rtx mem;
6769 enum memmodel model;
6770 machine_mode mode;
6772 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6773 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6774 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6776 return expand_atomic_test_and_set (target, mem, model);
6780 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6781 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6783 static tree
6784 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6786 int size;
6787 machine_mode mode;
6788 unsigned int mode_align, type_align;
6790 if (TREE_CODE (arg0) != INTEGER_CST)
6791 return NULL_TREE;
6793 /* We need a corresponding integer mode for the access to be lock-free. */
6794 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6795 if (!int_mode_for_size (size, 0).exists (&mode))
6796 return boolean_false_node;
6798 mode_align = GET_MODE_ALIGNMENT (mode);
6800 if (TREE_CODE (arg1) == INTEGER_CST)
6802 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6804 /* Either this argument is null, or it's a fake pointer encoding
6805 the alignment of the object. */
6806 val = least_bit_hwi (val);
6807 val *= BITS_PER_UNIT;
6809 if (val == 0 || mode_align < val)
6810 type_align = mode_align;
6811 else
6812 type_align = val;
6814 else
6816 tree ttype = TREE_TYPE (arg1);
6818 /* This function is usually invoked and folded immediately by the front
6819 end before anything else has a chance to look at it. The pointer
6820 parameter at this point is usually cast to a void *, so check for that
6821 and look past the cast. */
6822 if (CONVERT_EXPR_P (arg1)
6823 && POINTER_TYPE_P (ttype)
6824 && VOID_TYPE_P (TREE_TYPE (ttype))
6825 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6826 arg1 = TREE_OPERAND (arg1, 0);
6828 ttype = TREE_TYPE (arg1);
6829 gcc_assert (POINTER_TYPE_P (ttype));
6831 /* Get the underlying type of the object. */
6832 ttype = TREE_TYPE (ttype);
6833 type_align = TYPE_ALIGN (ttype);
6836 /* If the object has smaller alignment, the lock free routines cannot
6837 be used. */
6838 if (type_align < mode_align)
6839 return boolean_false_node;
6841 /* Check if a compare_and_swap pattern exists for the mode which represents
6842 the required size. The pattern is not allowed to fail, so the existence
6843 of the pattern indicates support is present. Also require that an
6844 atomic load exists for the required size. */
6845 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6846 return boolean_true_node;
6847 else
6848 return boolean_false_node;
6851 /* Return true if the parameters to call EXP represent an object which will
6852 always generate lock free instructions. The first argument represents the
6853 size of the object, and the second parameter is a pointer to the object
6854 itself. If NULL is passed for the object, then the result is based on
6855 typical alignment for an object of the specified size. Otherwise return
6856 false. */
6858 static rtx
6859 expand_builtin_atomic_always_lock_free (tree exp)
6861 tree size;
6862 tree arg0 = CALL_EXPR_ARG (exp, 0);
6863 tree arg1 = CALL_EXPR_ARG (exp, 1);
6865 if (TREE_CODE (arg0) != INTEGER_CST)
6867 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6868 return const0_rtx;
6871 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6872 if (size == boolean_true_node)
6873 return const1_rtx;
6874 return const0_rtx;
6877 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6878 is lock free on this architecture. */
6880 static tree
6881 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6883 if (!flag_inline_atomics)
6884 return NULL_TREE;
6886 /* If it isn't always lock free, don't generate a result. */
6887 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6888 return boolean_true_node;
6890 return NULL_TREE;
6893 /* Return true if the parameters to call EXP represent an object which will
6894 always generate lock free instructions. The first argument represents the
6895 size of the object, and the second parameter is a pointer to the object
6896 itself. If NULL is passed for the object, then the result is based on
6897 typical alignment for an object of the specified size. Otherwise return
6898 NULL. */
6900 static rtx
6901 expand_builtin_atomic_is_lock_free (tree exp)
6903 tree size;
6904 tree arg0 = CALL_EXPR_ARG (exp, 0);
6905 tree arg1 = CALL_EXPR_ARG (exp, 1);
6907 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6909 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6910 return NULL_RTX;
6913 if (!flag_inline_atomics)
6914 return NULL_RTX;
6916 /* If the value is known at compile time, return the RTX for it. */
6917 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6918 if (size == boolean_true_node)
6919 return const1_rtx;
6921 return NULL_RTX;
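/* Illustrative usage sketch (not part of this file): the lock-free queries
   folded and expanded above.  Passing a null object pointer asks about the
   typical alignment for the given size.  */
#if 0
int
ints_are_lock_free (void)
{
  return __atomic_always_lock_free (sizeof (int), 0)
         && __atomic_is_lock_free (sizeof (long), 0);
}
#endif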
6924 /* Expand the __atomic_thread_fence intrinsic:
6925 void __atomic_thread_fence (enum memmodel)
6926 EXP is the CALL_EXPR. */
6928 static void
6929 expand_builtin_atomic_thread_fence (tree exp)
6931 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6932 expand_mem_thread_fence (model);
6935 /* Expand the __atomic_signal_fence intrinsic:
6936 void __atomic_signal_fence (enum memmodel)
6937 EXP is the CALL_EXPR. */
6939 static void
6940 expand_builtin_atomic_signal_fence (tree exp)
6942 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6943 expand_mem_signal_fence (model);
6946 /* Expand the __sync_synchronize intrinsic. */
6948 static void
6949 expand_builtin_sync_synchronize (void)
6951 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6954 static rtx
6955 expand_builtin_thread_pointer (tree exp, rtx target)
6957 enum insn_code icode;
6958 if (!validate_arglist (exp, VOID_TYPE))
6959 return const0_rtx;
6960 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6961 if (icode != CODE_FOR_nothing)
6963 class expand_operand op;
6964 /* If the target is not suitable then create a new target. */
6965 if (target == NULL_RTX
6966 || !REG_P (target)
6967 || GET_MODE (target) != Pmode)
6968 target = gen_reg_rtx (Pmode);
6969 create_output_operand (&op, target, Pmode);
6970 expand_insn (icode, 1, &op);
6971 return target;
6973 error ("%<__builtin_thread_pointer%> is not supported on this target");
6974 return const0_rtx;
6977 static void
6978 expand_builtin_set_thread_pointer (tree exp)
6980 enum insn_code icode;
6981 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6982 return;
6983 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6984 if (icode != CODE_FOR_nothing)
6986 class expand_operand op;
6987 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6988 Pmode, EXPAND_NORMAL);
6989 create_input_operand (&op, val, Pmode);
6990 expand_insn (icode, 1, &op);
6991 return;
6993 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6997 /* Emit code to restore the current value of stack. */
6999 static void
7000 expand_stack_restore (tree var)
7002 rtx_insn *prev;
7003 rtx sa = expand_normal (var);
7005 sa = convert_memory_address (Pmode, sa);
7007 prev = get_last_insn ();
7008 emit_stack_restore (SAVE_BLOCK, sa);
7010 record_new_stack_level ();
7012 fixup_args_size_notes (prev, get_last_insn (), 0);
7015 /* Emit code to save the current value of the stack pointer. */
7017 static rtx
7018 expand_stack_save (void)
7020 rtx ret = NULL_RTX;
7022 emit_stack_save (SAVE_BLOCK, &ret);
7023 return ret;
7026 /* Emit code to get the openacc gang, worker or vector id or size. */
7028 static rtx
7029 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7031 const char *name;
7032 rtx fallback_retval;
7033 rtx_insn *(*gen_fn) (rtx, rtx);
7034 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7036 case BUILT_IN_GOACC_PARLEVEL_ID:
7037 name = "__builtin_goacc_parlevel_id";
7038 fallback_retval = const0_rtx;
7039 gen_fn = targetm.gen_oacc_dim_pos;
7040 break;
7041 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7042 name = "__builtin_goacc_parlevel_size";
7043 fallback_retval = const1_rtx;
7044 gen_fn = targetm.gen_oacc_dim_size;
7045 break;
7046 default:
7047 gcc_unreachable ();
7050 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7052 error ("%qs only supported in OpenACC code", name);
7053 return const0_rtx;
7056 tree arg = CALL_EXPR_ARG (exp, 0);
7057 if (TREE_CODE (arg) != INTEGER_CST)
7059 error ("non-constant argument 0 to %qs", name);
7060 return const0_rtx;
7063 int dim = TREE_INT_CST_LOW (arg);
7064 switch (dim)
7066 case GOMP_DIM_GANG:
7067 case GOMP_DIM_WORKER:
7068 case GOMP_DIM_VECTOR:
7069 break;
7070 default:
7071 error ("illegal argument 0 to %qs", name);
7072 return const0_rtx;
7075 if (ignore)
7076 return target;
7078 if (target == NULL_RTX)
7079 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7081 if (!targetm.have_oacc_dim_size ())
7083 emit_move_insn (target, fallback_retval);
7084 return target;
7087 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7088 emit_insn (gen_fn (reg, GEN_INT (dim)));
7089 if (reg != target)
7090 emit_move_insn (target, reg);
7092 return target;
7095 /* Expand a string compare operation using a sequence of char comparisons
7096 to get rid of the call overhead, with the result going to TARGET if
7097 that's convenient.
7099 VAR_STR is the variable string source;
7100 CONST_STR is the constant string source;
7101 LENGTH is the number of chars to compare;
7102 CONST_STR_N indicates which source string is the constant string;
7103 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7105 The call expands to (assuming const_str_n is 2, i.e., arg2 is a constant string):
7107 target = (int) (unsigned char) var_str[0]
7108 - (int) (unsigned char) const_str[0];
7109 if (target != 0)
7110 goto ne_label;
7112 target = (int) (unsigned char) var_str[length - 2]
7113 - (int) (unsigned char) const_str[length - 2];
7114 if (target != 0)
7115 goto ne_label;
7116 target = (int) (unsigned char) var_str[length - 1]
7117 - (int) (unsigned char) const_str[length - 1];
7118 ne_label:
7121 static rtx
7122 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7123 unsigned HOST_WIDE_INT length,
7124 int const_str_n, machine_mode mode)
7126 HOST_WIDE_INT offset = 0;
7127 rtx var_rtx_array
7128 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7129 rtx var_rtx = NULL_RTX;
7130 rtx const_rtx = NULL_RTX;
7131 rtx result = target ? target : gen_reg_rtx (mode);
7132 rtx_code_label *ne_label = gen_label_rtx ();
7133 tree unit_type_node = unsigned_char_type_node;
7134 scalar_int_mode unit_mode
7135 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7137 start_sequence ();
7139 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7141 var_rtx
7142 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7143 const_rtx = c_readstr (const_str + offset, unit_mode);
7144 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7145 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7147 op0 = convert_modes (mode, unit_mode, op0, 1);
7148 op1 = convert_modes (mode, unit_mode, op1, 1);
7149 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7150 result, 1, OPTAB_WIDEN);
7152 /* Force the difference into the result register. We cannot reassign
7153 RESULT here ("result = diff") or we may end up returning an
7154 uninitialized result when expand_simple_binop allocates a new
7155 pseudo-register for the return value. */
7156 if (diff != result)
7157 emit_move_insn (result, diff);
7159 if (i < length - 1)
7160 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7161 mode, true, ne_label);
7162 offset += GET_MODE_SIZE (unit_mode);
7165 emit_label (ne_label);
7166 rtx_insn *insns = get_insns ();
7167 end_sequence ();
7168 emit_insn (insns);
7170 return result;
7173 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
7174 to TARGET if that's convenient.
7175 If the call cannot be inlined, return NULL_RTX. */
7177 static rtx
7178 inline_expand_builtin_bytecmp (tree exp, rtx target)
7180 tree fndecl = get_callee_fndecl (exp);
7181 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7182 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7184 /* Do NOT apply this inlining expansion when optimizing for size, when the
7185 optimization level is below 2, or if the unused *cmp result hasn't been DCEd. */
7186 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7187 return NULL_RTX;
7189 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7190 || fcode == BUILT_IN_STRNCMP
7191 || fcode == BUILT_IN_MEMCMP);
7193 /* On a target where the type of the call (int) has the same or narrower
7194 precision than unsigned char, give up on the inlining expansion. */
7195 if (TYPE_PRECISION (unsigned_char_type_node)
7196 >= TYPE_PRECISION (TREE_TYPE (exp)))
7197 return NULL_RTX;
7199 tree arg1 = CALL_EXPR_ARG (exp, 0);
7200 tree arg2 = CALL_EXPR_ARG (exp, 1);
7201 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7203 unsigned HOST_WIDE_INT len1 = 0;
7204 unsigned HOST_WIDE_INT len2 = 0;
7205 unsigned HOST_WIDE_INT len3 = 0;
7207 /* Get the object representation of the initializers of ARG1 and ARG2
7208 as strings, provided they refer to constant objects, with their byte
7209 sizes in LEN1 and LEN2, respectively. */
7210 const char *bytes1 = getbyterep (arg1, &len1);
7211 const char *bytes2 = getbyterep (arg2, &len2);
7213 /* Fail if neither argument refers to an initialized constant. */
7214 if (!bytes1 && !bytes2)
7215 return NULL_RTX;
7217 if (is_ncmp)
7219 /* Fail if the memcmp/strncmp bound is not a constant. */
7220 if (!tree_fits_uhwi_p (len3_tree))
7221 return NULL_RTX;
7223 len3 = tree_to_uhwi (len3_tree);
7225 if (fcode == BUILT_IN_MEMCMP)
7227 /* Fail if the memcmp bound is greater than the size of either
7228 of the two constant objects. */
7229 if ((bytes1 && len1 < len3)
7230 || (bytes2 && len2 < len3))
7231 return NULL_RTX;
7235 if (fcode != BUILT_IN_MEMCMP)
7237 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7238 and LEN2 to the length of the nul-terminated string stored
7239 in each. */
7240 if (bytes1 != NULL)
7241 len1 = strnlen (bytes1, len1) + 1;
7242 if (bytes2 != NULL)
7243 len2 = strnlen (bytes2, len2) + 1;
7246 /* See inline_string_cmp. */
7247 int const_str_n;
7248 if (!len1)
7249 const_str_n = 2;
7250 else if (!len2)
7251 const_str_n = 1;
7252 else if (len2 > len1)
7253 const_str_n = 1;
7254 else
7255 const_str_n = 2;
7257 /* For strncmp only, compute the new bound as the smallest of
7258 the lengths of the two strings (plus 1) and the bound provided
7259 to the function. */
7260 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7261 if (is_ncmp && len3 < bound)
7262 bound = len3;
7264 /* If the bound of the comparison is larger than the threshold,
7265 do nothing. */
7266 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7267 return NULL_RTX;
7269 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7271 /* Now, start the inline expansion of the call. */
7272 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7273 (const_str_n == 1) ? bytes1 : bytes2, bound,
7274 const_str_n, mode);
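/* Illustrative sketch, using a hypothetical function f: for a call such as

     int f (const char *s) { return strcmp (s, "hi"); }

   compiled at -O2, arg2 is a constant string, so const_str_n is 2, len2 is
   reduced to 3 (two characters plus the terminating nul) and the bound
   becomes 3.  If that bound does not exceed
   param_builtin_string_cmp_inline_length, inline_string_cmp emits the three
   byte comparisons described in its leading comment instead of a call to
   strcmp.  */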
7277 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7278 represents the size of the first argument to that call, or VOIDmode
7279 if the argument is a pointer. IGNORE will be true if the result
7280 isn't used. */
7281 static rtx
7282 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7283 bool ignore)
7285 rtx val, failsafe;
7286 unsigned nargs = call_expr_nargs (exp);
7288 tree arg0 = CALL_EXPR_ARG (exp, 0);
7290 if (mode == VOIDmode)
7292 mode = TYPE_MODE (TREE_TYPE (arg0));
7293 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7296 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7298 /* An optional second argument can be used as a failsafe value on
7299 some machines. If it isn't present, then the failsafe value is
7300 assumed to be 0. */
7301 if (nargs > 1)
7303 tree arg1 = CALL_EXPR_ARG (exp, 1);
7304 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7306 else
7307 failsafe = const0_rtx;
7309 /* If the result isn't used, the behavior is undefined. It would be
7310 nice to emit a warning here, but path splitting means this might
7311 happen with legitimate code. So simply drop the builtin
7312 expansion in that case; we've handled any side-effects above. */
7313 if (ignore)
7314 return const0_rtx;
7316 /* If we don't have a suitable target, create one to hold the result. */
7317 if (target == NULL || GET_MODE (target) != mode)
7318 target = gen_reg_rtx (mode);
7320 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7321 val = convert_modes (mode, VOIDmode, val, false);
7323 return targetm.speculation_safe_value (mode, target, val, failsafe);
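/* Illustrative sketch, using a hypothetical function load: a bounds-checked
   access such as

     int load (int *array, unsigned idx, unsigned bound)
     {
       if (idx < bound)
         return array[__builtin_speculation_safe_value (idx, 0)];
       return 0;
     }

   is expanded through targetm.speculation_safe_value; on targets with
   speculation barriers the hook arranges for IDX to be replaced by the
   failsafe value 0 on a mis-speculated path, while on other targets the
   value is simply passed through.  */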
7326 /* Expand an expression EXP that calls a built-in function,
7327 with result going to TARGET if that's convenient
7328 (and in mode MODE if that's convenient).
7329 SUBTARGET may be used as the target for computing one of EXP's operands.
7330 IGNORE is nonzero if the value is to be ignored. */
7332 rtx
7333 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7334 int ignore)
7336 tree fndecl = get_callee_fndecl (exp);
7337 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7338 int flags;
7340 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7341 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7343 /* When ASan is enabled, we don't want to expand some memory/string
7344 builtins; we rely on libsanitizer's hooks instead. This allows us to avoid
7345 redundant checks and be sure that possible overflows will be detected
7346 by ASan. */
7348 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7349 if (param_asan_kernel_mem_intrinsic_prefix
7350 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7351 | SANITIZE_KERNEL_HWADDRESS))
7352 switch (fcode)
7354 rtx save_decl_rtl, ret;
7355 case BUILT_IN_MEMCPY:
7356 case BUILT_IN_MEMMOVE:
7357 case BUILT_IN_MEMSET:
7358 save_decl_rtl = DECL_RTL (fndecl);
7359 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7360 ret = expand_call (exp, target, ignore);
7361 DECL_RTL (fndecl) = save_decl_rtl;
7362 return ret;
7363 default:
7364 break;
7366 if (sanitize_flags_p (SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7367 return expand_call (exp, target, ignore);
7369 /* When not optimizing, generate calls to library functions for a certain
7370 set of builtins. */
7371 if (!optimize
7372 && !called_as_built_in (fndecl)
7373 && fcode != BUILT_IN_FORK
7374 && fcode != BUILT_IN_EXECL
7375 && fcode != BUILT_IN_EXECV
7376 && fcode != BUILT_IN_EXECLP
7377 && fcode != BUILT_IN_EXECLE
7378 && fcode != BUILT_IN_EXECVP
7379 && fcode != BUILT_IN_EXECVE
7380 && fcode != BUILT_IN_CLEAR_CACHE
7381 && !ALLOCA_FUNCTION_CODE_P (fcode)
7382 && fcode != BUILT_IN_FREE)
7383 return expand_call (exp, target, ignore);
7385 /* The built-in function expanders test for target == const0_rtx
7386 to determine whether the function's result will be ignored. */
7387 if (ignore)
7388 target = const0_rtx;
7390 /* If the result of a pure or const built-in function is ignored, and
7391 none of its arguments are volatile, we can avoid expanding the
7392 built-in call and just evaluate the arguments for side-effects. */
7393 if (target == const0_rtx
7394 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7395 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7397 bool volatilep = false;
7398 tree arg;
7399 call_expr_arg_iterator iter;
7401 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7402 if (TREE_THIS_VOLATILE (arg))
7404 volatilep = true;
7405 break;
7408 if (! volatilep)
7410 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7411 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7412 return const0_rtx;
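/* Illustrative sketch of the case above: a const builtin whose value is
   discarded, e.g.

     (void) __builtin_popcount (x);

   arrives here with target == const0_rtx; since the function is const and
   the argument is not volatile, the argument is expanded only for its side
   effects and no popcount sequence is emitted.  */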
7416 switch (fcode)
7418 CASE_FLT_FN (BUILT_IN_FABS):
7419 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7420 case BUILT_IN_FABSD32:
7421 case BUILT_IN_FABSD64:
7422 case BUILT_IN_FABSD128:
7423 target = expand_builtin_fabs (exp, target, subtarget);
7424 if (target)
7425 return target;
7426 break;
7428 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7429 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7430 target = expand_builtin_copysign (exp, target, subtarget);
7431 if (target)
7432 return target;
7433 break;
7435 /* Just do a normal library call if we were unable to fold
7436 the values. */
7437 CASE_FLT_FN (BUILT_IN_CABS):
7438 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7439 break;
7441 CASE_FLT_FN (BUILT_IN_FMA):
7442 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7443 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7444 if (target)
7445 return target;
7446 break;
7448 CASE_FLT_FN (BUILT_IN_ILOGB):
7449 if (! flag_unsafe_math_optimizations)
7450 break;
7451 gcc_fallthrough ();
7452 CASE_FLT_FN (BUILT_IN_ISINF):
7453 CASE_FLT_FN (BUILT_IN_FINITE):
7454 case BUILT_IN_ISFINITE:
7455 case BUILT_IN_ISNORMAL:
7456 target = expand_builtin_interclass_mathfn (exp, target);
7457 if (target)
7458 return target;
7459 break;
7461 case BUILT_IN_ISSIGNALING:
7462 target = expand_builtin_issignaling (exp, target);
7463 if (target)
7464 return target;
7465 break;
7467 CASE_FLT_FN (BUILT_IN_ICEIL):
7468 CASE_FLT_FN (BUILT_IN_LCEIL):
7469 CASE_FLT_FN (BUILT_IN_LLCEIL):
7470 CASE_FLT_FN (BUILT_IN_LFLOOR):
7471 CASE_FLT_FN (BUILT_IN_IFLOOR):
7472 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7473 target = expand_builtin_int_roundingfn (exp, target);
7474 if (target)
7475 return target;
7476 break;
7478 CASE_FLT_FN (BUILT_IN_IRINT):
7479 CASE_FLT_FN (BUILT_IN_LRINT):
7480 CASE_FLT_FN (BUILT_IN_LLRINT):
7481 CASE_FLT_FN (BUILT_IN_IROUND):
7482 CASE_FLT_FN (BUILT_IN_LROUND):
7483 CASE_FLT_FN (BUILT_IN_LLROUND):
7484 target = expand_builtin_int_roundingfn_2 (exp, target);
7485 if (target)
7486 return target;
7487 break;
7489 CASE_FLT_FN (BUILT_IN_POWI):
7490 target = expand_builtin_powi (exp, target);
7491 if (target)
7492 return target;
7493 break;
7495 CASE_FLT_FN (BUILT_IN_CEXPI):
7496 target = expand_builtin_cexpi (exp, target);
7497 gcc_assert (target);
7498 return target;
7500 CASE_FLT_FN (BUILT_IN_SIN):
7501 CASE_FLT_FN (BUILT_IN_COS):
7502 if (! flag_unsafe_math_optimizations)
7503 break;
7504 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7505 if (target)
7506 return target;
7507 break;
7509 CASE_FLT_FN (BUILT_IN_SINCOS):
7510 if (! flag_unsafe_math_optimizations)
7511 break;
7512 target = expand_builtin_sincos (exp);
7513 if (target)
7514 return target;
7515 break;
7517 case BUILT_IN_FEGETROUND:
7518 target = expand_builtin_fegetround (exp, target, target_mode);
7519 if (target)
7520 return target;
7521 break;
7523 case BUILT_IN_FECLEAREXCEPT:
7524 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7525 feclearexcept_optab);
7526 if (target)
7527 return target;
7528 break;
7530 case BUILT_IN_FERAISEEXCEPT:
7531 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7532 feraiseexcept_optab);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_APPLY_ARGS:
7538 return expand_builtin_apply_args ();
7540 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7541 FUNCTION with a copy of the parameters described by
7542 ARGUMENTS, and ARGSIZE. It returns a block of memory
7543 allocated on the stack into which are stored all the registers
7544 that might possibly be used for returning the result of a
7545 function. ARGUMENTS is the value returned by
7546 __builtin_apply_args. ARGSIZE is the number of bytes of
7547 arguments that must be copied. ??? How should this value be
7548 computed? We'll also need a safe worst case value for varargs
7549 functions. */
7550 case BUILT_IN_APPLY:
7551 if (!validate_arglist (exp, POINTER_TYPE,
7552 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7553 && !validate_arglist (exp, REFERENCE_TYPE,
7554 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7555 return const0_rtx;
7556 else
7558 rtx ops[3];
7560 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7561 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7562 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7564 return expand_builtin_apply (ops[0], ops[1], ops[2]);
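/* Illustrative sketch, using hypothetical functions wrapper and target_fn:
   a forwarding wrapper such as

     void target_fn (int, double);

     void wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       __builtin_apply ((void (*) ()) target_fn, args, 16);
     }

   is expanded here into code that copies the saved argument registers plus
   the requested 16 bytes of stack arguments and re-invokes target_fn with
   them; the 16 is the caller-supplied ARGSIZE discussed in the comment
   above.  */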
7567 /* __builtin_return (RESULT) causes the function to return the
7568 value described by RESULT. RESULT is address of the block of
7569 memory returned by __builtin_apply. */
7570 case BUILT_IN_RETURN:
7571 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7572 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7573 return const0_rtx;
7575 case BUILT_IN_SAVEREGS:
7576 return expand_builtin_saveregs ();
7578 case BUILT_IN_VA_ARG_PACK:
7579 /* All valid uses of __builtin_va_arg_pack () are removed during
7580 inlining. */
7581 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7582 return const0_rtx;
7584 case BUILT_IN_VA_ARG_PACK_LEN:
7585 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7586 inlining. */
7587 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7588 return const0_rtx;
7590 /* Return the address of the first anonymous stack arg. */
7591 case BUILT_IN_NEXT_ARG:
7592 if (fold_builtin_next_arg (exp, false))
7593 return const0_rtx;
7594 return expand_builtin_next_arg ();
7596 case BUILT_IN_CLEAR_CACHE:
7597 expand_builtin___clear_cache (exp);
7598 return const0_rtx;
7600 case BUILT_IN_CLASSIFY_TYPE:
7601 return expand_builtin_classify_type (exp);
7603 case BUILT_IN_CONSTANT_P:
7604 return const0_rtx;
7606 case BUILT_IN_FRAME_ADDRESS:
7607 case BUILT_IN_RETURN_ADDRESS:
7608 return expand_builtin_frame_address (fndecl, exp);
7610 /* Returns the address of the area where the structure is returned.
7611 0 otherwise. */
7612 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7613 if (call_expr_nargs (exp) != 0
7614 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7615 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7616 return const0_rtx;
7617 else
7618 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7620 CASE_BUILT_IN_ALLOCA:
7621 target = expand_builtin_alloca (exp);
7622 if (target)
7623 return target;
7624 break;
7626 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7627 return expand_asan_emit_allocas_unpoison (exp);
7629 case BUILT_IN_STACK_SAVE:
7630 return expand_stack_save ();
7632 case BUILT_IN_STACK_RESTORE:
7633 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7634 return const0_rtx;
7636 case BUILT_IN_BSWAP16:
7637 case BUILT_IN_BSWAP32:
7638 case BUILT_IN_BSWAP64:
7639 case BUILT_IN_BSWAP128:
7640 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7641 if (target)
7642 return target;
7643 break;
7645 CASE_INT_FN (BUILT_IN_FFS):
7646 target = expand_builtin_unop (target_mode, exp, target,
7647 subtarget, ffs_optab);
7648 if (target)
7649 return target;
7650 break;
7652 CASE_INT_FN (BUILT_IN_CLZ):
7653 target = expand_builtin_unop (target_mode, exp, target,
7654 subtarget, clz_optab);
7655 if (target)
7656 return target;
7657 break;
7659 CASE_INT_FN (BUILT_IN_CTZ):
7660 target = expand_builtin_unop (target_mode, exp, target,
7661 subtarget, ctz_optab);
7662 if (target)
7663 return target;
7664 break;
7666 CASE_INT_FN (BUILT_IN_CLRSB):
7667 target = expand_builtin_unop (target_mode, exp, target,
7668 subtarget, clrsb_optab);
7669 if (target)
7670 return target;
7671 break;
7673 CASE_INT_FN (BUILT_IN_POPCOUNT):
7674 target = expand_builtin_unop (target_mode, exp, target,
7675 subtarget, popcount_optab);
7676 if (target)
7677 return target;
7678 break;
7680 CASE_INT_FN (BUILT_IN_PARITY):
7681 target = expand_builtin_unop (target_mode, exp, target,
7682 subtarget, parity_optab);
7683 if (target)
7684 return target;
7685 break;
7687 case BUILT_IN_STRLEN:
7688 target = expand_builtin_strlen (exp, target, target_mode);
7689 if (target)
7690 return target;
7691 break;
7693 case BUILT_IN_STRNLEN:
7694 target = expand_builtin_strnlen (exp, target, target_mode);
7695 if (target)
7696 return target;
7697 break;
7699 case BUILT_IN_STRCPY:
7700 target = expand_builtin_strcpy (exp, target);
7701 if (target)
7702 return target;
7703 break;
7705 case BUILT_IN_STRNCPY:
7706 target = expand_builtin_strncpy (exp, target);
7707 if (target)
7708 return target;
7709 break;
7711 case BUILT_IN_STPCPY:
7712 target = expand_builtin_stpcpy (exp, target, mode);
7713 if (target)
7714 return target;
7715 break;
7717 case BUILT_IN_MEMCPY:
7718 target = expand_builtin_memcpy (exp, target);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_MEMMOVE:
7724 target = expand_builtin_memmove (exp, target);
7725 if (target)
7726 return target;
7727 break;
7729 case BUILT_IN_MEMPCPY:
7730 target = expand_builtin_mempcpy (exp, target);
7731 if (target)
7732 return target;
7733 break;
7735 case BUILT_IN_MEMSET:
7736 target = expand_builtin_memset (exp, target, mode);
7737 if (target)
7738 return target;
7739 break;
7741 case BUILT_IN_BZERO:
7742 target = expand_builtin_bzero (exp);
7743 if (target)
7744 return target;
7745 break;
7747 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7748 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7749 when changing it to a strcmp call. */
7750 case BUILT_IN_STRCMP_EQ:
7751 target = expand_builtin_memcmp (exp, target, true);
7752 if (target)
7753 return target;
7755 /* Change this call back to a BUILT_IN_STRCMP. */
7756 TREE_OPERAND (exp, 1)
7757 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7759 /* Delete the last parameter. */
7760 unsigned int i;
7761 vec<tree, va_gc> *arg_vec;
7762 vec_alloc (arg_vec, 2);
7763 for (i = 0; i < 2; i++)
7764 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7765 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7766 /* FALLTHROUGH */
7768 case BUILT_IN_STRCMP:
7769 target = expand_builtin_strcmp (exp, target);
7770 if (target)
7771 return target;
7772 break;
7774 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7775 back to a BUILT_IN_STRNCMP. */
7776 case BUILT_IN_STRNCMP_EQ:
7777 target = expand_builtin_memcmp (exp, target, true);
7778 if (target)
7779 return target;
7781 /* Change it back to a BUILT_IN_STRNCMP. */
7782 TREE_OPERAND (exp, 1)
7783 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7784 /* FALLTHROUGH */
7786 case BUILT_IN_STRNCMP:
7787 target = expand_builtin_strncmp (exp, target, mode);
7788 if (target)
7789 return target;
7790 break;
7792 case BUILT_IN_BCMP:
7793 case BUILT_IN_MEMCMP:
7794 case BUILT_IN_MEMCMP_EQ:
7795 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7796 if (target)
7797 return target;
7798 if (fcode == BUILT_IN_MEMCMP_EQ)
7800 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7801 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7803 break;
7805 case BUILT_IN_SETJMP:
7806 /* This should have been lowered to the builtins below. */
7807 gcc_unreachable ();
7809 case BUILT_IN_SETJMP_SETUP:
7810 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7811 and the receiver label. */
7812 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7814 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7815 VOIDmode, EXPAND_NORMAL);
7816 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7817 rtx_insn *label_r = label_rtx (label);
7819 expand_builtin_setjmp_setup (buf_addr, label_r);
7820 return const0_rtx;
7822 break;
7824 case BUILT_IN_SETJMP_RECEIVER:
7825 /* __builtin_setjmp_receiver is passed the receiver label. */
7826 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7828 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7829 rtx_insn *label_r = label_rtx (label);
7831 expand_builtin_setjmp_receiver (label_r);
7832 nonlocal_goto_handler_labels
7833 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7834 nonlocal_goto_handler_labels);
7835 /* ??? Do not let expand_label treat us as such since we would
7836 not want to be both on the list of non-local labels and on
7837 the list of forced labels. */
7838 FORCED_LABEL (label) = 0;
7839 return const0_rtx;
7841 break;
7843 /* __builtin_longjmp is passed a pointer to an array of five words.
7844 It's similar to the C library longjmp function but works with
7845 __builtin_setjmp above. */
7846 case BUILT_IN_LONGJMP:
7847 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7849 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7850 VOIDmode, EXPAND_NORMAL);
7851 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7853 if (value != const1_rtx)
7855 error ("%<__builtin_longjmp%> second argument must be 1");
7856 return const0_rtx;
7859 expand_builtin_longjmp (buf_addr, value);
7860 return const0_rtx;
7862 break;
7864 case BUILT_IN_NONLOCAL_GOTO:
7865 target = expand_builtin_nonlocal_goto (exp);
7866 if (target)
7867 return target;
7868 break;
7870 /* This updates the setjmp buffer that is its argument with the value
7871 of the current stack pointer. */
7872 case BUILT_IN_UPDATE_SETJMP_BUF:
7873 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7875 rtx buf_addr
7876 = expand_normal (CALL_EXPR_ARG (exp, 0));
7878 expand_builtin_update_setjmp_buf (buf_addr);
7879 return const0_rtx;
7881 break;
7883 case BUILT_IN_TRAP:
7884 case BUILT_IN_UNREACHABLE_TRAP:
7885 expand_builtin_trap ();
7886 return const0_rtx;
7888 case BUILT_IN_UNREACHABLE:
7889 expand_builtin_unreachable ();
7890 return const0_rtx;
7892 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7893 case BUILT_IN_SIGNBITD32:
7894 case BUILT_IN_SIGNBITD64:
7895 case BUILT_IN_SIGNBITD128:
7896 target = expand_builtin_signbit (exp, target);
7897 if (target)
7898 return target;
7899 break;
7901 /* Various hooks for the DWARF 2 __throw routine. */
7902 case BUILT_IN_UNWIND_INIT:
7903 expand_builtin_unwind_init ();
7904 return const0_rtx;
7905 case BUILT_IN_DWARF_CFA:
7906 return virtual_cfa_rtx;
7907 #ifdef DWARF2_UNWIND_INFO
7908 case BUILT_IN_DWARF_SP_COLUMN:
7909 return expand_builtin_dwarf_sp_column ();
7910 case BUILT_IN_INIT_DWARF_REG_SIZES:
7911 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7912 return const0_rtx;
7913 #endif
7914 case BUILT_IN_FROB_RETURN_ADDR:
7915 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7916 case BUILT_IN_EXTRACT_RETURN_ADDR:
7917 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7918 case BUILT_IN_EH_RETURN:
7919 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7920 CALL_EXPR_ARG (exp, 1));
7921 return const0_rtx;
7922 case BUILT_IN_EH_RETURN_DATA_REGNO:
7923 return expand_builtin_eh_return_data_regno (exp);
7924 case BUILT_IN_EXTEND_POINTER:
7925 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7926 case BUILT_IN_EH_POINTER:
7927 return expand_builtin_eh_pointer (exp);
7928 case BUILT_IN_EH_FILTER:
7929 return expand_builtin_eh_filter (exp);
7930 case BUILT_IN_EH_COPY_VALUES:
7931 return expand_builtin_eh_copy_values (exp);
7933 case BUILT_IN_VA_START:
7934 return expand_builtin_va_start (exp);
7935 case BUILT_IN_VA_END:
7936 return expand_builtin_va_end (exp);
7937 case BUILT_IN_VA_COPY:
7938 return expand_builtin_va_copy (exp);
7939 case BUILT_IN_EXPECT:
7940 return expand_builtin_expect (exp, target);
7941 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7942 return expand_builtin_expect_with_probability (exp, target);
7943 case BUILT_IN_ASSUME_ALIGNED:
7944 return expand_builtin_assume_aligned (exp, target);
7945 case BUILT_IN_PREFETCH:
7946 expand_builtin_prefetch (exp);
7947 return const0_rtx;
7949 case BUILT_IN_INIT_TRAMPOLINE:
7950 return expand_builtin_init_trampoline (exp, true);
7951 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7952 return expand_builtin_init_trampoline (exp, false);
7953 case BUILT_IN_ADJUST_TRAMPOLINE:
7954 return expand_builtin_adjust_trampoline (exp);
7956 case BUILT_IN_INIT_DESCRIPTOR:
7957 return expand_builtin_init_descriptor (exp);
7958 case BUILT_IN_ADJUST_DESCRIPTOR:
7959 return expand_builtin_adjust_descriptor (exp);
7961 case BUILT_IN_FORK:
7962 case BUILT_IN_EXECL:
7963 case BUILT_IN_EXECV:
7964 case BUILT_IN_EXECLP:
7965 case BUILT_IN_EXECLE:
7966 case BUILT_IN_EXECVP:
7967 case BUILT_IN_EXECVE:
7968 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7974 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7975 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7976 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7977 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7978 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7979 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7980 if (target)
7981 return target;
7982 break;
7984 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7985 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7986 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7987 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7988 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7989 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7990 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7991 if (target)
7992 return target;
7993 break;
7995 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7996 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7997 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7998 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7999 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8000 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8001 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8002 if (target)
8003 return target;
8004 break;
8006 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8007 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8008 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8009 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8010 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8012 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8013 if (target)
8014 return target;
8015 break;
8017 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8018 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8019 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8020 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8021 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8022 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8023 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8024 if (target)
8025 return target;
8026 break;
8028 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8029 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8030 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8031 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8032 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8033 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8034 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8035 if (target)
8036 return target;
8037 break;
8039 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8040 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8041 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8042 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8043 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8044 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8045 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8046 if (target)
8047 return target;
8048 break;
8050 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8051 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8052 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8053 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8054 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8055 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8056 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8057 if (target)
8058 return target;
8059 break;
8061 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8062 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8063 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8064 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8065 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8066 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8067 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8068 if (target)
8069 return target;
8070 break;
8072 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8073 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8074 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8075 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8076 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8077 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8078 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8079 if (target)
8080 return target;
8081 break;
8083 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8084 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8085 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8086 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8087 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8088 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8089 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8090 if (target)
8091 return target;
8092 break;
8094 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8095 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8096 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8097 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8098 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8099 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8100 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8101 if (target)
8102 return target;
8103 break;
8105 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8106 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8107 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8108 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8109 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8110 if (mode == VOIDmode)
8111 mode = TYPE_MODE (boolean_type_node);
8112 if (!target || !register_operand (target, mode))
8113 target = gen_reg_rtx (mode);
8115 mode = get_builtin_sync_mode
8116 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8117 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8118 if (target)
8119 return target;
8120 break;
8122 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8123 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8124 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8125 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8126 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8127 mode = get_builtin_sync_mode
8128 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8129 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8130 if (target)
8131 return target;
8132 break;
8134 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8135 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8136 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8137 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8138 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8139 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8140 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8141 if (target)
8142 return target;
8143 break;
8145 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8146 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8147 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8148 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8149 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8150 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8151 expand_builtin_sync_lock_release (mode, exp);
8152 return const0_rtx;
8154 case BUILT_IN_SYNC_SYNCHRONIZE:
8155 expand_builtin_sync_synchronize ();
8156 return const0_rtx;
8158 case BUILT_IN_ATOMIC_EXCHANGE_1:
8159 case BUILT_IN_ATOMIC_EXCHANGE_2:
8160 case BUILT_IN_ATOMIC_EXCHANGE_4:
8161 case BUILT_IN_ATOMIC_EXCHANGE_8:
8162 case BUILT_IN_ATOMIC_EXCHANGE_16:
8163 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8164 target = expand_builtin_atomic_exchange (mode, exp, target);
8165 if (target)
8166 return target;
8167 break;
8169 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8170 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8171 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8172 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8173 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8175 unsigned int nargs, z;
8176 vec<tree, va_gc> *vec;
8178 mode =
8179 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8180 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8181 if (target)
8182 return target;
8184 /* If this is turned into an external library call, the weak parameter
8185 must be dropped to match the expected parameter list. */
8186 nargs = call_expr_nargs (exp);
8187 vec_alloc (vec, nargs - 1);
8188 for (z = 0; z < 3; z++)
8189 vec->quick_push (CALL_EXPR_ARG (exp, z));
8190 /* Skip the boolean weak parameter. */
8191 for (z = 4; z < 6; z++)
8192 vec->quick_push (CALL_EXPR_ARG (exp, z));
8193 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8194 break;
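/* Illustrative mapping for the case above: the builtin form

     __atomic_compare_exchange_n (ptr, &expected, desired, weak,
                                  success_order, failure_order)

   carries six arguments, while the libatomic fallback
   __atomic_compare_exchange_N takes only five (it has no WEAK parameter),
   which is why the code above copies arguments 0-2 and 4-5 into the rebuilt
   CALL_EXPR and drops argument 3.  */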
8197 case BUILT_IN_ATOMIC_LOAD_1:
8198 case BUILT_IN_ATOMIC_LOAD_2:
8199 case BUILT_IN_ATOMIC_LOAD_4:
8200 case BUILT_IN_ATOMIC_LOAD_8:
8201 case BUILT_IN_ATOMIC_LOAD_16:
8202 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8203 target = expand_builtin_atomic_load (mode, exp, target);
8204 if (target)
8205 return target;
8206 break;
8208 case BUILT_IN_ATOMIC_STORE_1:
8209 case BUILT_IN_ATOMIC_STORE_2:
8210 case BUILT_IN_ATOMIC_STORE_4:
8211 case BUILT_IN_ATOMIC_STORE_8:
8212 case BUILT_IN_ATOMIC_STORE_16:
8213 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8214 target = expand_builtin_atomic_store (mode, exp);
8215 if (target)
8216 return const0_rtx;
8217 break;
8219 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8220 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8221 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8222 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8223 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8225 enum built_in_function lib;
8226 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8227 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8228 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8229 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8230 ignore, lib);
8231 if (target)
8232 return target;
8233 break;
8235 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8236 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8237 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8238 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8239 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8241 enum built_in_function lib;
8242 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8243 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8244 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8245 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8246 ignore, lib);
8247 if (target)
8248 return target;
8249 break;
8251 case BUILT_IN_ATOMIC_AND_FETCH_1:
8252 case BUILT_IN_ATOMIC_AND_FETCH_2:
8253 case BUILT_IN_ATOMIC_AND_FETCH_4:
8254 case BUILT_IN_ATOMIC_AND_FETCH_8:
8255 case BUILT_IN_ATOMIC_AND_FETCH_16:
8257 enum built_in_function lib;
8258 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8259 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8260 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8261 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8262 ignore, lib);
8263 if (target)
8264 return target;
8265 break;
8267 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8268 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8269 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8270 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8271 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8273 enum built_in_function lib;
8274 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8275 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8276 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8277 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8278 ignore, lib);
8279 if (target)
8280 return target;
8281 break;
8283 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8284 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8285 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8286 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8287 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8289 enum built_in_function lib;
8290 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8291 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8292 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8293 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8294 ignore, lib);
8295 if (target)
8296 return target;
8297 break;
8299 case BUILT_IN_ATOMIC_OR_FETCH_1:
8300 case BUILT_IN_ATOMIC_OR_FETCH_2:
8301 case BUILT_IN_ATOMIC_OR_FETCH_4:
8302 case BUILT_IN_ATOMIC_OR_FETCH_8:
8303 case BUILT_IN_ATOMIC_OR_FETCH_16:
8305 enum built_in_function lib;
8306 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8307 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8308 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8309 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8310 ignore, lib);
8311 if (target)
8312 return target;
8313 break;
8315 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8316 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8317 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8318 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8319 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8320 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8321 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8322 ignore, BUILT_IN_NONE);
8323 if (target)
8324 return target;
8325 break;
8327 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8328 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8329 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8330 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8331 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8332 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8333 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8334 ignore, BUILT_IN_NONE);
8335 if (target)
8336 return target;
8337 break;
8339 case BUILT_IN_ATOMIC_FETCH_AND_1:
8340 case BUILT_IN_ATOMIC_FETCH_AND_2:
8341 case BUILT_IN_ATOMIC_FETCH_AND_4:
8342 case BUILT_IN_ATOMIC_FETCH_AND_8:
8343 case BUILT_IN_ATOMIC_FETCH_AND_16:
8344 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8345 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8346 ignore, BUILT_IN_NONE);
8347 if (target)
8348 return target;
8349 break;
8351 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8352 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8353 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8354 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8355 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8356 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8357 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8358 ignore, BUILT_IN_NONE);
8359 if (target)
8360 return target;
8361 break;
8363 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8364 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8365 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8366 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8367 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8368 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8369 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8370 ignore, BUILT_IN_NONE);
8371 if (target)
8372 return target;
8373 break;
8375 case BUILT_IN_ATOMIC_FETCH_OR_1:
8376 case BUILT_IN_ATOMIC_FETCH_OR_2:
8377 case BUILT_IN_ATOMIC_FETCH_OR_4:
8378 case BUILT_IN_ATOMIC_FETCH_OR_8:
8379 case BUILT_IN_ATOMIC_FETCH_OR_16:
8380 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8381 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8382 ignore, BUILT_IN_NONE);
8383 if (target)
8384 return target;
8385 break;
8387 case BUILT_IN_ATOMIC_TEST_AND_SET:
8388 return expand_builtin_atomic_test_and_set (exp, target);
8390 case BUILT_IN_ATOMIC_CLEAR:
8391 return expand_builtin_atomic_clear (exp);
8393 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8394 return expand_builtin_atomic_always_lock_free (exp);
8396 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8397 target = expand_builtin_atomic_is_lock_free (exp);
8398 if (target)
8399 return target;
8400 break;
8402 case BUILT_IN_ATOMIC_THREAD_FENCE:
8403 expand_builtin_atomic_thread_fence (exp);
8404 return const0_rtx;
8406 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8407 expand_builtin_atomic_signal_fence (exp);
8408 return const0_rtx;
8410 case BUILT_IN_OBJECT_SIZE:
8411 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8412 return expand_builtin_object_size (exp);
8414 case BUILT_IN_MEMCPY_CHK:
8415 case BUILT_IN_MEMPCPY_CHK:
8416 case BUILT_IN_MEMMOVE_CHK:
8417 case BUILT_IN_MEMSET_CHK:
8418 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8419 if (target)
8420 return target;
8421 break;
8423 case BUILT_IN_STRCPY_CHK:
8424 case BUILT_IN_STPCPY_CHK:
8425 case BUILT_IN_STRNCPY_CHK:
8426 case BUILT_IN_STPNCPY_CHK:
8427 case BUILT_IN_STRCAT_CHK:
8428 case BUILT_IN_STRNCAT_CHK:
8429 case BUILT_IN_SNPRINTF_CHK:
8430 case BUILT_IN_VSNPRINTF_CHK:
8431 maybe_emit_chk_warning (exp, fcode);
8432 break;
8434 case BUILT_IN_SPRINTF_CHK:
8435 case BUILT_IN_VSPRINTF_CHK:
8436 maybe_emit_sprintf_chk_warning (exp, fcode);
8437 break;
8439 case BUILT_IN_THREAD_POINTER:
8440 return expand_builtin_thread_pointer (exp, target);
8442 case BUILT_IN_SET_THREAD_POINTER:
8443 expand_builtin_set_thread_pointer (exp);
8444 return const0_rtx;
8446 case BUILT_IN_ACC_ON_DEVICE:
8447 /* Do a library call if we failed to expand the builtin when
8448 folding. */
8449 break;
8451 case BUILT_IN_GOACC_PARLEVEL_ID:
8452 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8453 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8455 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8456 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8458 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8459 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8460 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8461 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8462 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8464 return expand_speculation_safe_value (mode, exp, target, ignore);
8466 default: /* Just do a library call if the builtin is unknown. */
8467 break;
8470 /* The switch statement above can drop through to cause the function
8471 to be called normally. */
8472 return expand_call (exp, target, ignore);
8475 /* Determine whether a tree node represents a call to a built-in
8476 function. If the tree T is a call to a built-in function with
8477 the right number of arguments of the appropriate types, return
8478 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8479 Otherwise the return value is END_BUILTINS. */
8481 enum built_in_function
8482 builtin_mathfn_code (const_tree t)
8484 const_tree fndecl, arg, parmlist;
8485 const_tree argtype, parmtype;
8486 const_call_expr_arg_iterator iter;
8488 if (TREE_CODE (t) != CALL_EXPR)
8489 return END_BUILTINS;
8491 fndecl = get_callee_fndecl (t);
8492 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8493 return END_BUILTINS;
8495 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8496 init_const_call_expr_arg_iterator (t, &iter);
8497 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8499 /* If a function doesn't take a variable number of arguments,
8500 the last element in the list will have type `void'. */
8501 parmtype = TREE_VALUE (parmlist);
8502 if (VOID_TYPE_P (parmtype))
8504 if (more_const_call_expr_args_p (&iter))
8505 return END_BUILTINS;
8506 return DECL_FUNCTION_CODE (fndecl);
8509 if (! more_const_call_expr_args_p (&iter))
8510 return END_BUILTINS;
8512 arg = next_const_call_expr_arg (&iter);
8513 argtype = TREE_TYPE (arg);
8515 if (SCALAR_FLOAT_TYPE_P (parmtype))
8517 if (! SCALAR_FLOAT_TYPE_P (argtype))
8518 return END_BUILTINS;
8520 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8522 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8523 return END_BUILTINS;
8525 else if (POINTER_TYPE_P (parmtype))
8527 if (! POINTER_TYPE_P (argtype))
8528 return END_BUILTINS;
8530 else if (INTEGRAL_TYPE_P (parmtype))
8532 if (! INTEGRAL_TYPE_P (argtype))
8533 return END_BUILTINS;
8535 else
8536 return END_BUILTINS;
8539 /* Variable-length argument list. */
8540 return DECL_FUNCTION_CODE (fndecl);
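/* Illustrative sketch: for a CALL_EXPR representing sqrt (x) where X has
   type double, the loop above matches the single floating-point parameter
   and DECL_FUNCTION_CODE yields BUILT_IN_SQRT; a call whose arguments do
   not match the builtin's prototype (wrong count, or e.g. a pointer where a
   floating-point value is expected) yields END_BUILTINS instead.  */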
8543 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8544 evaluate to a constant. */
8546 static tree
8547 fold_builtin_constant_p (tree arg)
8549 /* We return 1 for a numeric type that's known to be a constant
8550 value at compile-time or for an aggregate type that's a
8551 literal constant. */
8552 STRIP_NOPS (arg);
8554 /* If we know this is a constant, return the constant one. */
8555 if (CONSTANT_CLASS_P (arg)
8556 || (TREE_CODE (arg) == CONSTRUCTOR
8557 && TREE_CONSTANT (arg)))
8558 return integer_one_node;
8559 if (TREE_CODE (arg) == ADDR_EXPR)
8561 tree op = TREE_OPERAND (arg, 0);
8562 if (TREE_CODE (op) == STRING_CST
8563 || (TREE_CODE (op) == ARRAY_REF
8564 && integer_zerop (TREE_OPERAND (op, 1))
8565 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8566 return integer_one_node;
8569 /* If this expression has side effects, show we don't know it to be a
8570 constant. Likewise if it's a pointer or aggregate type, since in
8571 those cases we only want literals; those are only optimized
8572 when generating RTL, not later.
8573 And finally, if we are compiling an initializer, not code, we
8574 need to return a definite result now; there's not going to be any
8575 more optimization done. */
8576 if (TREE_SIDE_EFFECTS (arg)
8577 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8578 || POINTER_TYPE_P (TREE_TYPE (arg))
8579 || cfun == 0
8580 || folding_initializer
8581 || force_folding_builtin_constant_p)
8582 return integer_zero_node;
8584 return NULL_TREE;
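/* Illustrative sketch of how the folding above behaves:

     __builtin_constant_p (3)      folds to 1 (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  folds to 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    folds to 0 (the argument has side effects)
     __builtin_constant_p (x)      yields NULL_TREE, i.e. the call is kept so
                                   later optimizations may still prove X
                                   constant, unless we are folding an
                                   initializer or force_folding_builtin_constant_p
                                   demands an answer now.  */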
8587 /* Create builtin_expect or builtin_expect_with_probability
8588 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8589 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8590 argument. builtin_expect_with_probability instead uses the third argument
8591 as the PROBABILITY value. */
8593 static tree
8594 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8595 tree predictor, tree probability)
8597 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8599 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8600 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8601 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8602 ret_type = TREE_TYPE (TREE_TYPE (fn));
8603 pred_type = TREE_VALUE (arg_types);
8604 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8606 pred = fold_convert_loc (loc, pred_type, pred);
8607 expected = fold_convert_loc (loc, expected_type, expected);
8609 if (probability)
8610 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8611 else
8612 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8613 predictor);
8615 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8616 build_int_cst (ret_type, 0));
8619 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8620 NULL_TREE if no simplification is possible. */
8622 tree
8623 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8624 tree arg3)
8626 tree inner, fndecl, inner_arg0;
8627 enum tree_code code;
8629 /* Distribute the expected value over short-circuiting operators.
8630 See through the cast from truthvalue_type_node to long. */
8631 inner_arg0 = arg0;
8632 while (CONVERT_EXPR_P (inner_arg0)
8633 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8634 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8635 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8637 /* If this is a builtin_expect within a builtin_expect, keep the
8638 inner one. See through a comparison against a constant; it
8639 might have been added to create a truthvalue. */
8640 inner = inner_arg0;
8642 if (COMPARISON_CLASS_P (inner)
8643 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8644 inner = TREE_OPERAND (inner, 0);
8646 if (TREE_CODE (inner) == CALL_EXPR
8647 && (fndecl = get_callee_fndecl (inner))
8648 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8649 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8650 return arg0;
8652 inner = inner_arg0;
8653 code = TREE_CODE (inner);
8654 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8656 tree op0 = TREE_OPERAND (inner, 0);
8657 tree op1 = TREE_OPERAND (inner, 1);
8658 arg1 = save_expr (arg1);
8660 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8661 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8662 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8664 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8667 /* If the argument isn't invariant then there's nothing else we can do. */
8668 if (!TREE_CONSTANT (inner_arg0))
8669 return NULL_TREE;
8671 /* If we expect that a comparison against the argument will fold to
8672 a constant, return the constant. In practice, this means a true
8673 constant or the address of a non-weak symbol. */
8674 inner = inner_arg0;
8675 STRIP_NOPS (inner);
8676 if (TREE_CODE (inner) == ADDR_EXPR)
8680 inner = TREE_OPERAND (inner, 0);
8682 while (TREE_CODE (inner) == COMPONENT_REF
8683 || TREE_CODE (inner) == ARRAY_REF);
8684 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8685 return NULL_TREE;
8688 /* Otherwise, ARG0 already has the proper type for the return value. */
8689 return arg0;
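/* Illustrative sketch: for source like

     if (__builtin_expect (a > 0 && b > 0, 1))
       ...

   the TRUTH_ANDIF_EXPR branch above distributes the hint, producing roughly

     if (__builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1))
       ...

   so that each short-circuited sub-condition carries its own expectation.  */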
8692 /* Fold a call to __builtin_classify_type with argument ARG. */
8694 static tree
8695 fold_builtin_classify_type (tree arg)
8697 if (arg == 0)
8698 return build_int_cst (integer_type_node, no_type_class);
8700 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8703 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8704 ARG. */
8706 static tree
8707 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8709 if (!validate_arg (arg, POINTER_TYPE))
8710 return NULL_TREE;
8711 else
8713 c_strlen_data lendata = { };
8714 tree len = c_strlen (arg, 0, &lendata);
8716 if (len)
8717 return fold_convert_loc (loc, type, len);
8719 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8720 also early enough to detect invalid reads in multidimensional
8721 arrays and struct members. */
8722 if (!lendata.decl)
8723 c_strlen (arg, 1, &lendata);
8725 if (lendata.decl)
8727 if (EXPR_HAS_LOCATION (arg))
8728 loc = EXPR_LOCATION (arg);
8729 else if (loc == UNKNOWN_LOCATION)
8730 loc = input_location;
8731 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8734 return NULL_TREE;
8738 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8740 static tree
8741 fold_builtin_inf (location_t loc, tree type, int warn)
8743 /* __builtin_inff is intended to be usable to define INFINITY on all
8744 targets. If an infinity is not available, INFINITY expands "to a
8745 positive constant of type float that overflows at translation
8746 time", footnote "In this case, using INFINITY will violate the
8747 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8748 Thus we pedwarn to ensure this constraint violation is
8749 diagnosed. */
8750 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8751 pedwarn (loc, 0, "target format does not support infinity");
8753 return build_real (type, dconstinf);
8756 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8757 NULL_TREE if no simplification can be made. */
8759 static tree
8760 fold_builtin_sincos (location_t loc,
8761 tree arg0, tree arg1, tree arg2)
8763 tree type;
8764 tree fndecl, call = NULL_TREE;
8766 if (!validate_arg (arg0, REAL_TYPE)
8767 || !validate_arg (arg1, POINTER_TYPE)
8768 || !validate_arg (arg2, POINTER_TYPE))
8769 return NULL_TREE;
8771 type = TREE_TYPE (arg0);
8773 /* Canonicalize sincos to cexpi. */
8774 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8775 if (fn == END_BUILTINS)
8776 return NULL_TREE;
8778 /* Calculate the result when the argument is a constant. */
8779 if (TREE_CODE (arg0) == REAL_CST)
8781 tree complex_type = build_complex_type (type);
8782 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8784 if (!call)
8786 if (!targetm.libc_has_function (function_c99_math_complex, type)
8787 || !builtin_decl_implicit_p (fn))
8788 return NULL_TREE;
8789 fndecl = builtin_decl_explicit (fn);
8790 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8791 call = builtin_save_expr (call);
8794 tree ptype = build_pointer_type (type);
8795 arg1 = fold_convert (ptype, arg1);
8796 arg2 = fold_convert (ptype, arg2);
8797 return build2 (COMPOUND_EXPR, void_type_node,
8798 build2 (MODIFY_EXPR, void_type_node,
8799 build_fold_indirect_ref_loc (loc, arg1),
8800 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8801 build2 (MODIFY_EXPR, void_type_node,
8802 build_fold_indirect_ref_loc (loc, arg2),
8803 fold_build1_loc (loc, REALPART_EXPR, type, call)));
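/* Editorial illustration (not part of builtins.cc): roughly what the
   sincos -> cexpi canonicalization above amounts to at the source level.
   cexpi (x) computes cos (x) + i*sin (x); the imaginary part feeds *sinp
   and the real part feeds *cosp.  Sketch only; it assumes the GCC-internal
   __builtin_cexpi spelling of CFN_BUILT_IN_CEXPI is usable directly.  */
static void
sincos_as_cexpi_sketch (double x, double *sinp, double *cosp)
{
  _Complex double t = __builtin_cexpi (x);
  *sinp = __imag__ t;
  *cosp = __real__ t;
}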
8806 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8807 Return NULL_TREE if no simplification can be made. */
8809 static tree
8810 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8812 if (!validate_arg (arg1, POINTER_TYPE)
8813 || !validate_arg (arg2, POINTER_TYPE)
8814 || !validate_arg (len, INTEGER_TYPE))
8815 return NULL_TREE;
8817 /* If the LEN parameter is zero, return zero. */
8818 if (integer_zerop (len))
8819 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8820 arg1, arg2);
8822 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8823 if (operand_equal_p (arg1, arg2, 0))
8824 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8826 /* If len parameter is one, return an expression corresponding to
8827 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8828 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8830 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8831 tree cst_uchar_ptr_node
8832 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8834 tree ind1
8835 = fold_convert_loc (loc, integer_type_node,
8836 build1 (INDIRECT_REF, cst_uchar_node,
8837 fold_convert_loc (loc,
8838 cst_uchar_ptr_node,
8839 arg1)));
8840 tree ind2
8841 = fold_convert_loc (loc, integer_type_node,
8842 build1 (INDIRECT_REF, cst_uchar_node,
8843 fold_convert_loc (loc,
8844 cst_uchar_ptr_node,
8845 arg2)));
8846 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8849 return NULL_TREE;
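/* Editorial illustration (not part of builtins.cc): the three memcmp cases
   folded above, written at the source level with GCC's __builtin_memcmp.  */
static int
memcmp_fold_examples (const void *a, const void *b, __SIZE_TYPE__ n)
{
  int r0 = __builtin_memcmp (a, b, 0);   /* zero length: folds to 0.  */
  int r1 = __builtin_memcmp (a, a, n);   /* equal operands: folds to 0.  */
  /* length 1: folds to the byte difference described in the comment above.  */
  int r2 = *(const unsigned char *) a - *(const unsigned char *) b;
  return r0 + r1 + r2;
}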
8852 /* Fold a call to builtin isascii with argument ARG. */
8854 static tree
8855 fold_builtin_isascii (location_t loc, tree arg)
8857 if (!validate_arg (arg, INTEGER_TYPE))
8858 return NULL_TREE;
8859 else
8861 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8862 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8863 build_int_cst (integer_type_node,
8864 ~ (unsigned HOST_WIDE_INT) 0x7f));
8865 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8866 arg, integer_zero_node);
8870 /* Fold a call to builtin toascii with argument ARG. */
8872 static tree
8873 fold_builtin_toascii (location_t loc, tree arg)
8875 if (!validate_arg (arg, INTEGER_TYPE))
8876 return NULL_TREE;
8878 /* Transform toascii(c) -> (c & 0x7f). */
8879 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8880 build_int_cst (integer_type_node, 0x7f));
8883 /* Fold a call to builtin isdigit with argument ARG. */
8885 static tree
8886 fold_builtin_isdigit (location_t loc, tree arg)
8888 if (!validate_arg (arg, INTEGER_TYPE))
8889 return NULL_TREE;
8890 else
8892 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8893 /* According to the C standard, isdigit is unaffected by locale.
8894 However, it definitely is affected by the target character set. */
8895 unsigned HOST_WIDE_INT target_digit0
8896 = lang_hooks.to_target_charset ('0');
8898 if (target_digit0 == 0)
8899 return NULL_TREE;
8901 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8902 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8903 build_int_cst (unsigned_type_node, target_digit0));
8904 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8905 build_int_cst (unsigned_type_node, 9));
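/* Editorial illustration (not part of builtins.cc): the open-coded forms
   produced by the isascii/toascii/isdigit folds above, assuming a target
   character set where '0' has its usual value (the property checked via
   lang_hooks.to_target_charset above).  */
static int
ctype_fold_examples (int c)
{
  int is_ascii = (c & ~0x7f) == 0;          /* isascii (c)  */
  int to_ascii = c & 0x7f;                  /* toascii (c)  */
  int is_digit = (unsigned) c - '0' <= 9;   /* isdigit (c)  */
  return is_ascii + to_ascii + is_digit;
}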
8909 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8911 static tree
8912 fold_builtin_fabs (location_t loc, tree arg, tree type)
8914 if (!validate_arg (arg, REAL_TYPE))
8915 return NULL_TREE;
8917 arg = fold_convert_loc (loc, type, arg);
8918 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8921 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8923 static tree
8924 fold_builtin_abs (location_t loc, tree arg, tree type)
8926 if (!validate_arg (arg, INTEGER_TYPE))
8927 return NULL_TREE;
8929 arg = fold_convert_loc (loc, type, arg);
8930 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8933 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8935 static tree
8936 fold_builtin_carg (location_t loc, tree arg, tree type)
8938 if (validate_arg (arg, COMPLEX_TYPE)
8939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8941 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8943 if (atan2_fn)
8945 tree new_arg = builtin_save_expr (arg);
8946 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8947 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8948 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8952 return NULL_TREE;
8955 /* Fold a call to builtin frexp, we can assume the base is 2. */
8957 static tree
8958 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8960 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8961 return NULL_TREE;
8963 STRIP_NOPS (arg0);
8965 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8966 return NULL_TREE;
8968 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8970 /* Proceed if a valid pointer type was passed in. */
8971 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8973 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8974 tree frac, exp, res;
8976 switch (value->cl)
8978 case rvc_zero:
8979 /* For +-0, return (*exp = 0, +-0). */
8980 exp = integer_zero_node;
8981 frac = arg0;
8982 break;
8983 case rvc_nan:
8984 case rvc_inf:
8985 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8986 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8987 case rvc_normal:
8989 /* Since the frexp function always expects base 2, and in
8990 GCC normalized significands are already in the range
8991 [0.5, 1.0), we have exactly what frexp wants. */
8992 REAL_VALUE_TYPE frac_rvt = *value;
8993 SET_REAL_EXP (&frac_rvt, 0);
8994 frac = build_real (rettype, frac_rvt);
8995 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8997 break;
8998 default:
8999 gcc_unreachable ();
9002 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9003 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9004 TREE_SIDE_EFFECTS (arg1) = 1;
9005 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9006 suppress_warning (res, OPT_Wunused_value);
9007 return res;
9010 return NULL_TREE;
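/* Editorial illustration (not part of builtins.cc): the constant fold above
   in action.  GCC's normalized significands already lie in [0.5, 1.0), which
   is frexp's base-2 convention, so 4.0 decomposes as 0.5 * 2^3.  */
static double
frexp_fold_example (void)
{
  int e;
  double f = __builtin_frexp (4.0, &e);   /* folds to (e = 3, f = 0.5)  */
  return f + e;                           /* 3.5 */
}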
9013 /* Fold a call to builtin modf. */
9015 static tree
9016 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9018 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9019 return NULL_TREE;
9021 STRIP_NOPS (arg0);
9023 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9024 return NULL_TREE;
9026 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9028 /* Proceed if a valid pointer type was passed in. */
9029 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9031 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9032 REAL_VALUE_TYPE trunc, frac;
9033 tree res;
9035 switch (value->cl)
9037 case rvc_nan:
9038 case rvc_zero:
9039 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9040 trunc = frac = *value;
9041 break;
9042 case rvc_inf:
9043 /* For +-Inf, return (*arg1 = arg0, +-0). */
9044 frac = dconst0;
9045 frac.sign = value->sign;
9046 trunc = *value;
9047 break;
9048 case rvc_normal:
9049 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9050 real_trunc (&trunc, VOIDmode, value);
9051 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9052 /* If the original number was negative and already
9053 integral, then the fractional part is -0.0. */
9054 if (value->sign && frac.cl == rvc_zero)
9055 frac.sign = value->sign;
9056 break;
9059 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9060 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9061 build_real (rettype, trunc));
9062 TREE_SIDE_EFFECTS (arg1) = 1;
9063 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9064 build_real (rettype, frac));
9065 suppress_warning (res, OPT_Wunused_value);
9066 return res;
9069 return NULL_TREE;
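/* Editorial illustration (not part of builtins.cc): the constant cases the
   modf fold above distinguishes, including the -0.0 fractional part for a
   negative value that is already integral.  */
static double
modf_fold_examples (void)
{
  double ip;
  double f1 = __builtin_modf (2.5, &ip);    /* ip = 2.0,  f1 = 0.5   */
  double f2 = __builtin_modf (-3.0, &ip);   /* ip = -3.0, f2 = -0.0  */
  return f1 + f2;
}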
9072 /* Given a location LOC, an interclass builtin function decl FNDECL
9073 and its single argument ARG, return a folded expression computing
9074 the same, or NULL_TREE if we either couldn't or didn't want to fold
9075 (the latter happens if there's an RTL instruction available). */
9077 static tree
9078 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9080 machine_mode mode;
9082 if (!validate_arg (arg, REAL_TYPE))
9083 return NULL_TREE;
9085 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9086 return NULL_TREE;
9088 mode = TYPE_MODE (TREE_TYPE (arg));
9090 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9092 /* If there is no optab, try generic code. */
9093 switch (DECL_FUNCTION_CODE (fndecl))
9095 tree result;
9097 CASE_FLT_FN (BUILT_IN_ISINF):
9099 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9100 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9101 tree type = TREE_TYPE (arg);
9102 REAL_VALUE_TYPE r;
9103 char buf[128];
9105 if (is_ibm_extended)
9107 /* NaN and Inf are encoded in the high-order double value
9108 only. The low-order value is not significant. */
9109 type = double_type_node;
9110 mode = DFmode;
9111 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9113 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9114 real_from_string (&r, buf);
9115 result = build_call_expr (isgr_fn, 2,
9116 fold_build1_loc (loc, ABS_EXPR, type, arg),
9117 build_real (type, r));
9118 return result;
9120 CASE_FLT_FN (BUILT_IN_FINITE):
9121 case BUILT_IN_ISFINITE:
9123 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9124 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9125 tree type = TREE_TYPE (arg);
9126 REAL_VALUE_TYPE r;
9127 char buf[128];
9129 if (is_ibm_extended)
9131 /* NaN and Inf are encoded in the high-order double value
9132 only. The low-order value is not significant. */
9133 type = double_type_node;
9134 mode = DFmode;
9135 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9137 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9138 real_from_string (&r, buf);
9139 result = build_call_expr (isle_fn, 2,
9140 fold_build1_loc (loc, ABS_EXPR, type, arg),
9141 build_real (type, r));
9142 /*result = fold_build2_loc (loc, UNGT_EXPR,
9143 TREE_TYPE (TREE_TYPE (fndecl)),
9144 fold_build1_loc (loc, ABS_EXPR, type, arg),
9145 build_real (type, r));
9146 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9147 TREE_TYPE (TREE_TYPE (fndecl)),
9148 result);*/
9149 return result;
9151 case BUILT_IN_ISNORMAL:
9153 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9154 islessequal(fabs(x),DBL_MAX). */
9155 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9156 tree type = TREE_TYPE (arg);
9157 tree orig_arg, max_exp, min_exp;
9158 machine_mode orig_mode = mode;
9159 REAL_VALUE_TYPE rmax, rmin;
9160 char buf[128];
9162 orig_arg = arg = builtin_save_expr (arg);
9163 if (is_ibm_extended)
9165 /* Use double to test the normal range of IBM extended
9166 precision. Emin for IBM extended precision is
9167 different to emin for IEEE double, being 53 higher
9168 since the low double exponent is at least 53 lower
9169 than the high double exponent. */
9170 type = double_type_node;
9171 mode = DFmode;
9172 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9174 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9176 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9177 real_from_string (&rmax, buf);
9178 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9179 real_from_string (&rmin, buf);
9180 max_exp = build_real (type, rmax);
9181 min_exp = build_real (type, rmin);
9183 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9184 if (is_ibm_extended)
9186 /* Testing the high end of the range is done just using
9187 the high double, using the same test as isfinite().
9188 For the subnormal end of the range we first test the
9189 high double, then if its magnitude is equal to the
9190 limit of 0x1p-969, we test whether the low double is
9191 non-zero and opposite sign to the high double. */
9192 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9193 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9194 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9195 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9196 arg, min_exp);
9197 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9198 complex_double_type_node, orig_arg);
9199 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9200 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9201 tree zero = build_real (type, dconst0);
9202 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9203 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9204 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9205 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9206 fold_build3 (COND_EXPR,
9207 integer_type_node,
9208 hilt, logt, lolt));
9209 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9210 eq_min, ok_lo);
9211 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9212 gt_min, eq_min);
9214 else
9216 tree const isge_fn
9217 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9218 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9220 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9221 max_exp, min_exp);
9222 return result;
9224 default:
9225 break;
9228 return NULL_TREE;
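/* Editorial illustration (not part of builtins.cc): the generic fallbacks
   built above when no direct optab exists, written for an IEEE double (the
   IBM extended-precision special casing is omitted).  __DBL_MAX__ and
   __DBL_MIN__ stand in for the limits obtained via get_max_float and the
   "0x1p%d" string above.  */
static int
interclass_fold_examples (double x)
{
  double ax = __builtin_fabs (x);
  int inf_p    = __builtin_isgreater (ax, __DBL_MAX__);       /* isinf     */
  int finite_p = __builtin_islessequal (ax, __DBL_MAX__);     /* isfinite  */
  int normal_p = __builtin_isgreaterequal (ax, __DBL_MIN__)   /* isnormal  */
                 & finite_p;
  return inf_p + finite_p + normal_p;
}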
9231 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign,
9232 __builtin_isfinite or __builtin_issignaling. ARG is the argument for the call. */
9234 static tree
9235 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9239 if (!validate_arg (arg, REAL_TYPE))
9240 return NULL_TREE;
9242 switch (builtin_index)
9244 case BUILT_IN_ISINF:
9245 if (tree_expr_infinite_p (arg))
9246 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9247 if (!tree_expr_maybe_infinite_p (arg))
9248 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9249 return NULL_TREE;
9251 case BUILT_IN_ISINF_SIGN:
9253 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9254 /* In a boolean context, GCC will fold the inner COND_EXPR to
9255 1. So e.g. "if (isinf_sign(x))" would be folded to just
9256 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9257 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9258 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9259 tree tmp = NULL_TREE;
9261 arg = builtin_save_expr (arg);
9263 if (signbit_fn && isinf_fn)
9265 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9266 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9268 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9269 signbit_call, integer_zero_node);
9270 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9271 isinf_call, integer_zero_node);
9273 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9274 integer_minus_one_node, integer_one_node);
9275 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9276 isinf_call, tmp,
9277 integer_zero_node);
9280 return tmp;
9283 case BUILT_IN_ISFINITE:
9284 if (tree_expr_finite_p (arg))
9285 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9286 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9287 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9288 return NULL_TREE;
9290 case BUILT_IN_ISNAN:
9291 if (tree_expr_nan_p (arg))
9292 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9293 if (!tree_expr_maybe_nan_p (arg))
9294 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9297 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9298 if (is_ibm_extended)
9300 /* NaN and Inf are encoded in the high-order double value
9301 only. The low-order value is not significant. */
9302 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9305 arg = builtin_save_expr (arg);
9306 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9308 case BUILT_IN_ISSIGNALING:
9309 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9310 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9311 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9312 here, so there is some possibility of __builtin_issignaling working
9313 without -fsignaling-nans. Especially when -fno-signaling-nans is
9314 the default. */
9315 if (!tree_expr_maybe_nan_p (arg))
9316 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9317 return NULL_TREE;
9319 default:
9320 gcc_unreachable ();
9324 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9325 This builtin will generate code to return the appropriate floating
9326 point classification depending on the value of the floating point
9327 number passed in. The possible return values must be supplied as
9328 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9329 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
9330 one floating-point argument, which is "type generic". */
9332 static tree
9333 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9335 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9336 arg, type, res, tmp;
9337 machine_mode mode;
9338 REAL_VALUE_TYPE r;
9339 char buf[128];
9341 /* Verify the required arguments in the original call. */
9342 if (nargs != 6
9343 || !validate_arg (args[0], INTEGER_TYPE)
9344 || !validate_arg (args[1], INTEGER_TYPE)
9345 || !validate_arg (args[2], INTEGER_TYPE)
9346 || !validate_arg (args[3], INTEGER_TYPE)
9347 || !validate_arg (args[4], INTEGER_TYPE)
9348 || !validate_arg (args[5], REAL_TYPE))
9349 return NULL_TREE;
9351 fp_nan = args[0];
9352 fp_infinite = args[1];
9353 fp_normal = args[2];
9354 fp_subnormal = args[3];
9355 fp_zero = args[4];
9356 arg = args[5];
9357 type = TREE_TYPE (arg);
9358 mode = TYPE_MODE (type);
9359 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9361 /* fpclassify(x) ->
9362 isnan(x) ? FP_NAN :
9363 (fabs(x) == Inf ? FP_INFINITE :
9364 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9365 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9367 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9368 build_real (type, dconst0));
9369 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9370 tmp, fp_zero, fp_subnormal);
9372 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9373 real_from_string (&r, buf);
9374 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9375 arg, build_real (type, r));
9376 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9378 if (tree_expr_maybe_infinite_p (arg))
9380 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9381 build_real (type, dconstinf));
9382 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9383 fp_infinite, res);
9386 if (tree_expr_maybe_nan_p (arg))
9388 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9389 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9392 return res;
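/* Editorial illustration (not part of builtins.cc): the decision chain the
   fpclassify fold above emits.  The caller supplies the five FP_* values as
   the leading arguments of __builtin_fpclassify; local enumerators stand in
   for them here, and __DBL_MIN__ plays the role of the "0x1p%d" smallest
   normal constant built above.  */
enum { EX_FP_NAN, EX_FP_INFINITE, EX_FP_NORMAL, EX_FP_SUBNORMAL, EX_FP_ZERO };

static int
fpclassify_fold_example (double x)
{
  double ax = __builtin_fabs (x);
  return __builtin_isnan (x) ? EX_FP_NAN
         : ax == __builtin_inf () ? EX_FP_INFINITE
         : ax >= __DBL_MIN__ ? EX_FP_NORMAL
         : ax == 0.0 ? EX_FP_ZERO
         : EX_FP_SUBNORMAL;
}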
9395 /* Fold a call to an unordered comparison function such as
9396 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9397 being called and ARG0 and ARG1 are the arguments for the call.
9398 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9399 the opposite of the desired result. UNORDERED_CODE is used
9400 for modes that can hold NaNs and ORDERED_CODE is used for
9401 the rest. */
9403 static tree
9404 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9405 enum tree_code unordered_code,
9406 enum tree_code ordered_code)
9408 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9409 enum tree_code code;
9410 tree type0, type1;
9411 enum tree_code code0, code1;
9412 tree cmp_type = NULL_TREE;
9414 type0 = TREE_TYPE (arg0);
9415 type1 = TREE_TYPE (arg1);
9417 code0 = TREE_CODE (type0);
9418 code1 = TREE_CODE (type1);
9420 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9421 /* Choose the wider of two real types. */
9422 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9423 ? type0 : type1;
9424 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9425 cmp_type = type0;
9426 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9427 cmp_type = type1;
9429 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9430 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9432 if (unordered_code == UNORDERED_EXPR)
9434 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9435 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9436 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9437 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9438 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9441 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9442 ? unordered_code : ordered_code;
9443 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9444 fold_build2_loc (loc, code, type, arg0, arg1));
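/* Editorial illustration (not part of builtins.cc): the effect of the
   unordered-comparison folds when neither operand can be a NaN (for example
   under -ffinite-math-only).  The ordered tree code is chosen and the call
   reduces to a negated ordinary comparison; isunordered folds to 0.  */
static int
unordered_cmp_fold_examples (double x, double y)
{
  int g  = !(x <= y);   /* isgreater (x, y) with NaNs impossible       */
  int ge = !(x < y);    /* isgreaterequal (x, y) with NaNs impossible  */
  int un = 0;           /* isunordered (x, y) folds to 0 outright      */
  return g + ge + un;
}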
9447 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9448 arithmetics if it can never overflow, or into internal functions that
9449 return both result of arithmetics and overflowed boolean flag in
9450 a complex integer result, or some other check for overflow.
9451 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9452 checking part of that. */
9454 static tree
9455 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9456 tree arg0, tree arg1, tree arg2)
9458 enum internal_fn ifn = IFN_LAST;
9459 /* The code of the expression corresponding to the built-in. */
9460 enum tree_code opcode = ERROR_MARK;
9461 bool ovf_only = false;
9463 switch (fcode)
9465 case BUILT_IN_ADD_OVERFLOW_P:
9466 ovf_only = true;
9467 /* FALLTHRU */
9468 case BUILT_IN_ADD_OVERFLOW:
9469 case BUILT_IN_SADD_OVERFLOW:
9470 case BUILT_IN_SADDL_OVERFLOW:
9471 case BUILT_IN_SADDLL_OVERFLOW:
9472 case BUILT_IN_UADD_OVERFLOW:
9473 case BUILT_IN_UADDL_OVERFLOW:
9474 case BUILT_IN_UADDLL_OVERFLOW:
9475 opcode = PLUS_EXPR;
9476 ifn = IFN_ADD_OVERFLOW;
9477 break;
9478 case BUILT_IN_SUB_OVERFLOW_P:
9479 ovf_only = true;
9480 /* FALLTHRU */
9481 case BUILT_IN_SUB_OVERFLOW:
9482 case BUILT_IN_SSUB_OVERFLOW:
9483 case BUILT_IN_SSUBL_OVERFLOW:
9484 case BUILT_IN_SSUBLL_OVERFLOW:
9485 case BUILT_IN_USUB_OVERFLOW:
9486 case BUILT_IN_USUBL_OVERFLOW:
9487 case BUILT_IN_USUBLL_OVERFLOW:
9488 opcode = MINUS_EXPR;
9489 ifn = IFN_SUB_OVERFLOW;
9490 break;
9491 case BUILT_IN_MUL_OVERFLOW_P:
9492 ovf_only = true;
9493 /* FALLTHRU */
9494 case BUILT_IN_MUL_OVERFLOW:
9495 case BUILT_IN_SMUL_OVERFLOW:
9496 case BUILT_IN_SMULL_OVERFLOW:
9497 case BUILT_IN_SMULLL_OVERFLOW:
9498 case BUILT_IN_UMUL_OVERFLOW:
9499 case BUILT_IN_UMULL_OVERFLOW:
9500 case BUILT_IN_UMULLL_OVERFLOW:
9501 opcode = MULT_EXPR;
9502 ifn = IFN_MUL_OVERFLOW;
9503 break;
9504 default:
9505 gcc_unreachable ();
9508 /* For the "generic" overloads, the first two arguments can have different
9509 types and the last argument determines the target type to use to check
9510 for overflow. The arguments of the other overloads all have the same
9511 type. */
9512 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9514 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9515 arguments are constant, attempt to fold the built-in call into a constant
9516 expression indicating whether or not it detected an overflow. */
9517 if (ovf_only
9518 && TREE_CODE (arg0) == INTEGER_CST
9519 && TREE_CODE (arg1) == INTEGER_CST)
9520 /* Perform the computation in the target type and check for overflow. */
9521 return omit_one_operand_loc (loc, boolean_type_node,
9522 arith_overflowed_p (opcode, type, arg0, arg1)
9523 ? boolean_true_node : boolean_false_node,
9524 arg2);
9526 tree intres, ovfres;
9527 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9529 intres = fold_binary_loc (loc, opcode, type,
9530 fold_convert_loc (loc, type, arg0),
9531 fold_convert_loc (loc, type, arg1));
9532 if (TREE_OVERFLOW (intres))
9533 intres = drop_tree_overflow (intres);
9534 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9535 ? boolean_true_node : boolean_false_node);
9537 else
9539 tree ctype = build_complex_type (type);
9540 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9541 arg0, arg1);
9542 tree tgt = save_expr (call);
9543 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9544 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9545 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9548 if (ovf_only)
9549 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9551 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9552 tree store
9553 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9554 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
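/* Editorial illustration (not part of builtins.cc): the two flavours of
   overflow builtin folded above.  With constant operands the _p form becomes
   a compile-time constant; otherwise the call is lowered to an internal
   function whose complex result carries the value (REALPART) and the
   overflow flag (IMAGPART).  */
static int
overflow_fold_examples (int a, int b)
{
  int sum;
  /* Constant operands: folds to 1 here, since INT_MAX + 1 overflows int.  */
  int ovf_const = __builtin_add_overflow_p (__INT_MAX__, 1, (int) 0);
  /* Variable operands: lowered to IFN_ADD_OVERFLOW as described above.  */
  int ovf_var = __builtin_add_overflow (a, b, &sum);
  return ovf_const + ovf_var + sum;
}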
9557 /* Fold a call to __builtin_FILE to a constant string. */
9559 static inline tree
9560 fold_builtin_FILE (location_t loc)
9562 if (const char *fname = LOCATION_FILE (loc))
9564 /* The documentation says this builtin is equivalent to the preprocessor
9565 __FILE__ macro so it appears appropriate to use the same file prefix
9566 mappings. */
9567 fname = remap_macro_filename (fname);
9568 return build_string_literal (fname);
9571 return build_string_literal ("");
9574 /* Fold a call to __builtin_FUNCTION to a constant string. */
9576 static inline tree
9577 fold_builtin_FUNCTION ()
9579 const char *name = "";
9581 if (current_function_decl)
9582 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9584 return build_string_literal (name);
9587 /* Fold a call to __builtin_LINE to an integer constant. */
9589 static inline tree
9590 fold_builtin_LINE (location_t loc, tree type)
9592 return build_int_cst (type, LOCATION_LINE (loc));
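/* Editorial illustration (not part of builtins.cc): the three source-location
   builtins folded just above.  Each call folds to a constant describing its
   own call site; the file name goes through the same prefix remapping as the
   __FILE__ macro.  */
static void
source_location_fold_example (void)
{
  const char *file = __builtin_FILE ();       /* file containing this call  */
  const char *func = __builtin_FUNCTION ();   /* enclosing function name    */
  int line         = __builtin_LINE ();       /* line number of this call   */
  (void) file; (void) func; (void) line;
}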
9595 /* Fold a call to built-in function FNDECL with 0 arguments.
9596 This function returns NULL_TREE if no simplification was possible. */
9598 static tree
9599 fold_builtin_0 (location_t loc, tree fndecl)
9601 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9602 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9603 switch (fcode)
9605 case BUILT_IN_FILE:
9606 return fold_builtin_FILE (loc);
9608 case BUILT_IN_FUNCTION:
9609 return fold_builtin_FUNCTION ();
9611 case BUILT_IN_LINE:
9612 return fold_builtin_LINE (loc, type);
9614 CASE_FLT_FN (BUILT_IN_INF):
9615 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9616 case BUILT_IN_INFD32:
9617 case BUILT_IN_INFD64:
9618 case BUILT_IN_INFD128:
9619 return fold_builtin_inf (loc, type, true);
9621 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9622 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9623 return fold_builtin_inf (loc, type, false);
9625 case BUILT_IN_CLASSIFY_TYPE:
9626 return fold_builtin_classify_type (NULL_TREE);
9628 case BUILT_IN_UNREACHABLE:
9629 /* Rewrite any explicit calls to __builtin_unreachable. */
9630 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
9631 return build_builtin_unreachable (loc);
9632 break;
9634 default:
9635 break;
9637 return NULL_TREE;
9640 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9641 This function returns NULL_TREE if no simplification was possible. */
9643 static tree
9644 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9646 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9647 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9649 if (TREE_CODE (arg0) == ERROR_MARK)
9650 return NULL_TREE;
9652 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9653 return ret;
9655 switch (fcode)
9657 case BUILT_IN_CONSTANT_P:
9659 tree val = fold_builtin_constant_p (arg0);
9661 /* Gimplification will pull the CALL_EXPR for the builtin out of
9662 an if condition. When not optimizing, we'll not CSE it back.
9663 To avoid regressions such as link errors, return false now. */
9664 if (!val && !optimize)
9665 val = integer_zero_node;
9667 return val;
9670 case BUILT_IN_CLASSIFY_TYPE:
9671 return fold_builtin_classify_type (arg0);
9673 case BUILT_IN_STRLEN:
9674 return fold_builtin_strlen (loc, expr, type, arg0);
9676 CASE_FLT_FN (BUILT_IN_FABS):
9677 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9678 case BUILT_IN_FABSD32:
9679 case BUILT_IN_FABSD64:
9680 case BUILT_IN_FABSD128:
9681 return fold_builtin_fabs (loc, arg0, type);
9683 case BUILT_IN_ABS:
9684 case BUILT_IN_LABS:
9685 case BUILT_IN_LLABS:
9686 case BUILT_IN_IMAXABS:
9687 return fold_builtin_abs (loc, arg0, type);
9689 CASE_FLT_FN (BUILT_IN_CONJ):
9690 if (validate_arg (arg0, COMPLEX_TYPE)
9691 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9692 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9693 break;
9695 CASE_FLT_FN (BUILT_IN_CREAL):
9696 if (validate_arg (arg0, COMPLEX_TYPE)
9697 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9698 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9699 break;
9701 CASE_FLT_FN (BUILT_IN_CIMAG):
9702 if (validate_arg (arg0, COMPLEX_TYPE)
9703 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9704 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9705 break;
9707 CASE_FLT_FN (BUILT_IN_CARG):
9708 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
9709 return fold_builtin_carg (loc, arg0, type);
9711 case BUILT_IN_ISASCII:
9712 return fold_builtin_isascii (loc, arg0);
9714 case BUILT_IN_TOASCII:
9715 return fold_builtin_toascii (loc, arg0);
9717 case BUILT_IN_ISDIGIT:
9718 return fold_builtin_isdigit (loc, arg0);
9720 CASE_FLT_FN (BUILT_IN_FINITE):
9721 case BUILT_IN_FINITED32:
9722 case BUILT_IN_FINITED64:
9723 case BUILT_IN_FINITED128:
9724 case BUILT_IN_ISFINITE:
9726 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9727 if (ret)
9728 return ret;
9729 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9732 CASE_FLT_FN (BUILT_IN_ISINF):
9733 case BUILT_IN_ISINFD32:
9734 case BUILT_IN_ISINFD64:
9735 case BUILT_IN_ISINFD128:
9737 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9738 if (ret)
9739 return ret;
9740 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9743 case BUILT_IN_ISNORMAL:
9744 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9746 case BUILT_IN_ISINF_SIGN:
9747 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9749 CASE_FLT_FN (BUILT_IN_ISNAN):
9750 case BUILT_IN_ISNAND32:
9751 case BUILT_IN_ISNAND64:
9752 case BUILT_IN_ISNAND128:
9753 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9755 case BUILT_IN_ISSIGNALING:
9756 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
9758 case BUILT_IN_FREE:
9759 if (integer_zerop (arg0))
9760 return build_empty_stmt (loc);
9761 break;
9763 default:
9764 break;
9767 return NULL_TREE;
9771 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9772 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
9773 if no simplification was possible. */
9775 static tree
9776 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9778 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9779 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9781 if (TREE_CODE (arg0) == ERROR_MARK
9782 || TREE_CODE (arg1) == ERROR_MARK)
9783 return NULL_TREE;
9785 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9786 return ret;
9788 switch (fcode)
9790 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9791 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9792 if (validate_arg (arg0, REAL_TYPE)
9793 && validate_arg (arg1, POINTER_TYPE))
9794 return do_mpfr_lgamma_r (arg0, arg1, type);
9795 break;
9797 CASE_FLT_FN (BUILT_IN_FREXP):
9798 return fold_builtin_frexp (loc, arg0, arg1, type);
9800 CASE_FLT_FN (BUILT_IN_MODF):
9801 return fold_builtin_modf (loc, arg0, arg1, type);
9803 case BUILT_IN_STRSPN:
9804 return fold_builtin_strspn (loc, expr, arg0, arg1);
9806 case BUILT_IN_STRCSPN:
9807 return fold_builtin_strcspn (loc, expr, arg0, arg1);
9809 case BUILT_IN_STRPBRK:
9810 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9812 case BUILT_IN_EXPECT:
9813 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9815 case BUILT_IN_ISGREATER:
9816 return fold_builtin_unordered_cmp (loc, fndecl,
9817 arg0, arg1, UNLE_EXPR, LE_EXPR);
9818 case BUILT_IN_ISGREATEREQUAL:
9819 return fold_builtin_unordered_cmp (loc, fndecl,
9820 arg0, arg1, UNLT_EXPR, LT_EXPR);
9821 case BUILT_IN_ISLESS:
9822 return fold_builtin_unordered_cmp (loc, fndecl,
9823 arg0, arg1, UNGE_EXPR, GE_EXPR);
9824 case BUILT_IN_ISLESSEQUAL:
9825 return fold_builtin_unordered_cmp (loc, fndecl,
9826 arg0, arg1, UNGT_EXPR, GT_EXPR);
9827 case BUILT_IN_ISLESSGREATER:
9828 return fold_builtin_unordered_cmp (loc, fndecl,
9829 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9830 case BUILT_IN_ISUNORDERED:
9831 return fold_builtin_unordered_cmp (loc, fndecl,
9832 arg0, arg1, UNORDERED_EXPR,
9833 NOP_EXPR);
9835 /* We do the folding for va_start in the expander. */
9836 case BUILT_IN_VA_START:
9837 break;
9839 case BUILT_IN_OBJECT_SIZE:
9840 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
9841 return fold_builtin_object_size (arg0, arg1, fcode);
9843 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9844 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9846 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9847 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9849 default:
9850 break;
9852 return NULL_TREE;
9855 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9856 and ARG2.
9857 This function returns NULL_TREE if no simplification was possible. */
9859 static tree
9860 fold_builtin_3 (location_t loc, tree fndecl,
9861 tree arg0, tree arg1, tree arg2)
9863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9864 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9866 if (TREE_CODE (arg0) == ERROR_MARK
9867 || TREE_CODE (arg1) == ERROR_MARK
9868 || TREE_CODE (arg2) == ERROR_MARK)
9869 return NULL_TREE;
9871 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9872 arg0, arg1, arg2))
9873 return ret;
9875 switch (fcode)
9878 CASE_FLT_FN (BUILT_IN_SINCOS):
9879 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9881 CASE_FLT_FN (BUILT_IN_REMQUO):
9882 if (validate_arg (arg0, REAL_TYPE)
9883 && validate_arg (arg1, REAL_TYPE)
9884 && validate_arg (arg2, POINTER_TYPE))
9885 return do_mpfr_remquo (arg0, arg1, arg2);
9886 break;
9888 case BUILT_IN_MEMCMP:
9889 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9891 case BUILT_IN_EXPECT:
9892 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9894 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9895 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9897 case BUILT_IN_ADD_OVERFLOW:
9898 case BUILT_IN_SUB_OVERFLOW:
9899 case BUILT_IN_MUL_OVERFLOW:
9900 case BUILT_IN_ADD_OVERFLOW_P:
9901 case BUILT_IN_SUB_OVERFLOW_P:
9902 case BUILT_IN_MUL_OVERFLOW_P:
9903 case BUILT_IN_SADD_OVERFLOW:
9904 case BUILT_IN_SADDL_OVERFLOW:
9905 case BUILT_IN_SADDLL_OVERFLOW:
9906 case BUILT_IN_SSUB_OVERFLOW:
9907 case BUILT_IN_SSUBL_OVERFLOW:
9908 case BUILT_IN_SSUBLL_OVERFLOW:
9909 case BUILT_IN_SMUL_OVERFLOW:
9910 case BUILT_IN_SMULL_OVERFLOW:
9911 case BUILT_IN_SMULLL_OVERFLOW:
9912 case BUILT_IN_UADD_OVERFLOW:
9913 case BUILT_IN_UADDL_OVERFLOW:
9914 case BUILT_IN_UADDLL_OVERFLOW:
9915 case BUILT_IN_USUB_OVERFLOW:
9916 case BUILT_IN_USUBL_OVERFLOW:
9917 case BUILT_IN_USUBLL_OVERFLOW:
9918 case BUILT_IN_UMUL_OVERFLOW:
9919 case BUILT_IN_UMULL_OVERFLOW:
9920 case BUILT_IN_UMULLL_OVERFLOW:
9921 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9923 default:
9924 break;
9926 return NULL_TREE;
9929 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9930 ARGS is an array of NARGS arguments. IGNORE is true if the result
9931 of the function call is ignored. This function returns NULL_TREE
9932 if no simplification was possible. */
9934 static tree
9935 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9936 int nargs, bool)
9938 tree ret = NULL_TREE;
9940 switch (nargs)
9942 case 0:
9943 ret = fold_builtin_0 (loc, fndecl);
9944 break;
9945 case 1:
9946 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9947 break;
9948 case 2:
9949 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9950 break;
9951 case 3:
9952 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9953 break;
9954 default:
9955 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9956 break;
9958 if (ret)
9960 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9961 SET_EXPR_LOCATION (ret, loc);
9962 return ret;
9964 return NULL_TREE;
9967 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9968 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9969 of arguments in ARGS to be omitted. OLDNARGS is the number of
9970 elements in ARGS. */
9972 static tree
9973 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9974 int skip, tree fndecl, int n, va_list newargs)
9976 int nargs = oldnargs - skip + n;
9977 tree *buffer;
9979 if (n > 0)
9981 int i, j;
9983 buffer = XALLOCAVEC (tree, nargs);
9984 for (i = 0; i < n; i++)
9985 buffer[i] = va_arg (newargs, tree);
9986 for (j = skip; j < oldnargs; j++, i++)
9987 buffer[i] = args[j];
9989 else
9990 buffer = args + skip;
9992 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9995 /* Return true if FNDECL shouldn't be folded right now.
9996 If a built-in function has an inline wrapper declared always_inline,
9997 defer folding it until after always_inline functions have
9998 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9999 might not be performed. */
10001 bool
10002 avoid_folding_inline_builtin (tree fndecl)
10004 return (DECL_DECLARED_INLINE_P (fndecl)
10005 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10006 && cfun
10007 && !cfun->always_inline_functions_inlined
10008 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10011 /* A wrapper function for builtin folding that prevents warnings for
10012 "statement without effect" and the like, caused by removing the
10013 call node earlier than the warning is generated. */
10015 tree
10016 fold_call_expr (location_t loc, tree exp, bool ignore)
10018 tree ret = NULL_TREE;
10019 tree fndecl = get_callee_fndecl (exp);
10020 if (fndecl && fndecl_built_in_p (fndecl)
10021 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10022 yet. Defer folding until we see all the arguments
10023 (after inlining). */
10024 && !CALL_EXPR_VA_ARG_PACK (exp))
10026 int nargs = call_expr_nargs (exp);
10028 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10029 instead the last argument is __builtin_va_arg_pack (). Defer folding
10030 even in that case, until arguments are finalized. */
10031 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10033 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10034 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10035 return NULL_TREE;
10038 if (avoid_folding_inline_builtin (fndecl))
10039 return NULL_TREE;
10041 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10042 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10043 CALL_EXPR_ARGP (exp), ignore);
10044 else
10046 tree *args = CALL_EXPR_ARGP (exp);
10047 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10048 if (ret)
10049 return ret;
10052 return NULL_TREE;
10055 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10056 N arguments are passed in the array ARGARRAY. Return a folded
10057 expression or NULL_TREE if no simplification was possible. */
10059 tree
10060 fold_builtin_call_array (location_t loc, tree,
10061 tree fn,
10062 int n,
10063 tree *argarray)
10065 if (TREE_CODE (fn) != ADDR_EXPR)
10066 return NULL_TREE;
10068 tree fndecl = TREE_OPERAND (fn, 0);
10069 if (TREE_CODE (fndecl) == FUNCTION_DECL
10070 && fndecl_built_in_p (fndecl))
10072 /* If last argument is __builtin_va_arg_pack (), arguments to this
10073 function are not finalized yet. Defer folding until they are. */
10074 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10076 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10077 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10078 return NULL_TREE;
10080 if (avoid_folding_inline_builtin (fndecl))
10081 return NULL_TREE;
10082 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10083 return targetm.fold_builtin (fndecl, n, argarray, false);
10084 else
10085 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10088 return NULL_TREE;
10091 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10092 along with N new arguments specified as the "..." parameters. SKIP
10093 is the number of arguments in EXP to be omitted. This function is used
10094 to do varargs-to-varargs transformations. */
10096 static tree
10097 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10099 va_list ap;
10100 tree t;
10102 va_start (ap, n);
10103 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10104 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10105 va_end (ap);
10107 return t;
10110 /* Validate a single argument ARG against a tree code CODE representing
10111 a type. Return true when argument is valid. */
10113 static bool
10114 validate_arg (const_tree arg, enum tree_code code)
10116 if (!arg)
10117 return false;
10118 else if (code == POINTER_TYPE)
10119 return POINTER_TYPE_P (TREE_TYPE (arg));
10120 else if (code == INTEGER_TYPE)
10121 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10122 return code == TREE_CODE (TREE_TYPE (arg));
10125 /* This function validates the types of a function call argument list
10126 against a specified list of tree_codes. If the last specifier is a 0,
10127 that represents an ellipsis; otherwise the last specifier must be a
10128 VOID_TYPE.
10130 This is the GIMPLE version of validate_arglist. Eventually we want to
10131 completely convert builtins.cc to work from GIMPLEs and the tree based
10132 validate_arglist will then be removed. */
10134 bool
10135 validate_gimple_arglist (const gcall *call, ...)
10137 enum tree_code code;
10138 bool res = 0;
10139 va_list ap;
10140 const_tree arg;
10141 size_t i;
10143 va_start (ap, call);
10144 i = 0;
10148 code = (enum tree_code) va_arg (ap, int);
10149 switch (code)
10151 case 0:
10152 /* This signifies an ellipsis; any further arguments are all ok. */
10153 res = true;
10154 goto end;
10155 case VOID_TYPE:
10156 /* This signifies an endlink, if no arguments remain, return
10157 true, otherwise return false. */
10158 res = (i == gimple_call_num_args (call));
10159 goto end;
10160 default:
10161 /* If no parameters remain or the parameter's code does not
10162 match the specified code, return false. Otherwise continue
10163 checking any remaining arguments. */
10164 arg = gimple_call_arg (call, i++);
10165 if (!validate_arg (arg, code))
10166 goto end;
10167 break;
10170 while (1);
10172 /* We need gotos here since we can only have one VA_CLOSE in a
10173 function. */
10174 end: ;
10175 va_end (ap);
10177 return res;
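/* Editorial illustration (not part of builtins.cc): a typical use of
   validate_gimple_arglist.  The trailing VOID_TYPE requires an exact
   signature; a trailing 0 would instead allow additional arguments.  The
   checked signature here (double, int *) is hypothetical.  */
static bool
validate_gimple_arglist_usage (const gcall *call)
{
  return validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE);
}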
10180 /* Default target-specific builtin expander that does nothing. */
10183 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10184 rtx target ATTRIBUTE_UNUSED,
10185 rtx subtarget ATTRIBUTE_UNUSED,
10186 machine_mode mode ATTRIBUTE_UNUSED,
10187 int ignore ATTRIBUTE_UNUSED)
10189 return NULL_RTX;
10192 /* Returns true if EXP represents data that would potentially reside
10193 in a readonly section. */
10195 bool
10196 readonly_data_expr (tree exp)
10198 STRIP_NOPS (exp);
10200 if (TREE_CODE (exp) != ADDR_EXPR)
10201 return false;
10203 exp = get_base_address (TREE_OPERAND (exp, 0));
10204 if (!exp)
10205 return false;
10207 /* Make sure we call decl_readonly_section only for trees it
10208 can handle (since it returns true for everything it doesn't
10209 understand). */
10210 if (TREE_CODE (exp) == STRING_CST
10211 || TREE_CODE (exp) == CONSTRUCTOR
10212 || (VAR_P (exp) && TREE_STATIC (exp)))
10213 return decl_readonly_section (exp, 0);
10214 else
10215 return false;
10218 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10219 to the call, and TYPE is its return type.
10221 Return NULL_TREE if no simplification was possible, otherwise return the
10222 simplified form of the call as a tree.
10224 The simplified form may be a constant or other expression which
10225 computes the same value, but in a more efficient manner (including
10226 calls to other builtin functions).
10228 The call may contain arguments which need to be evaluated, but
10229 which are not useful to determine the result of the call. In
10230 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10231 COMPOUND_EXPR will be an argument which must be evaluated.
10232 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10233 COMPOUND_EXPR in the chain will contain the tree for the simplified
10234 form of the builtin function call. */
10236 static tree
10237 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
10239 if (!validate_arg (s1, POINTER_TYPE)
10240 || !validate_arg (s2, POINTER_TYPE))
10241 return NULL_TREE;
10243 tree fn;
10244 const char *p1, *p2;
10246 p2 = c_getstr (s2);
10247 if (p2 == NULL)
10248 return NULL_TREE;
10250 p1 = c_getstr (s1);
10251 if (p1 != NULL)
10253 const char *r = strpbrk (p1, p2);
10254 tree tem;
10256 if (r == NULL)
10257 return build_int_cst (TREE_TYPE (s1), 0);
10259 /* Return an offset into the constant string argument. */
10260 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10261 return fold_convert_loc (loc, type, tem);
10264 if (p2[0] == '\0')
10265 /* strpbrk(x, "") == NULL.
10266 Evaluate and ignore s1 in case it had side-effects. */
10267 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10269 if (p2[1] != '\0')
10270 return NULL_TREE; /* Really call strpbrk. */
10272 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10273 if (!fn)
10274 return NULL_TREE;
10276 /* New argument list transforming strpbrk(s1, s2) to
10277 strchr(s1, s2[0]). */
10278 return build_call_expr_loc (loc, fn, 2, s1,
10279 build_int_cst (integer_type_node, p2[0]));
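/* Editorial illustration (not part of builtins.cc): the strpbrk folds above
   at the source level, using GCC's __builtin_strpbrk.  */
static const char *
strpbrk_fold_examples (const char *s)
{
  const char *r0 = __builtin_strpbrk (s, "");       /* folds to NULL            */
  const char *r1 = __builtin_strpbrk (s, "x");      /* folds to strchr (s, 'x') */
  const char *r2 = __builtin_strpbrk ("abc", "cb"); /* constant: &"abc"[1]      */
  return r0 ? r0 : r1 ? r1 : r2;
}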
10282 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10283 to the call.
10285 Return NULL_TREE if no simplification was possible, otherwise return the
10286 simplified form of the call as a tree.
10288 The simplified form may be a constant or other expression which
10289 computes the same value, but in a more efficient manner (including
10290 calls to other builtin functions).
10292 The call may contain arguments which need to be evaluated, but
10293 which are not useful to determine the result of the call. In
10294 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10295 COMPOUND_EXPR will be an argument which must be evaluated.
10296 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10297 COMPOUND_EXPR in the chain will contain the tree for the simplified
10298 form of the builtin function call. */
10300 static tree
10301 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10303 if (!validate_arg (s1, POINTER_TYPE)
10304 || !validate_arg (s2, POINTER_TYPE))
10305 return NULL_TREE;
10307 if (!check_nul_terminated_array (expr, s1)
10308 || !check_nul_terminated_array (expr, s2))
10309 return NULL_TREE;
10311 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10313 /* If either argument is "", the result is zero. */
10314 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10315 /* Evaluate and ignore both arguments in case either one has
10316 side-effects. */
10317 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10318 s1, s2);
10319 return NULL_TREE;
10322 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10323 to the call.
10325 Return NULL_TREE if no simplification was possible, otherwise return the
10326 simplified form of the call as a tree.
10328 The simplified form may be a constant or other expression which
10329 computes the same value, but in a more efficient manner (including
10330 calls to other builtin functions).
10332 The call may contain arguments which need to be evaluated, but
10333 which are not useful to determine the result of the call. In
10334 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10335 COMPOUND_EXPR will be an argument which must be evaluated.
10336 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10337 COMPOUND_EXPR in the chain will contain the tree for the simplified
10338 form of the builtin function call. */
10340 static tree
10341 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10343 if (!validate_arg (s1, POINTER_TYPE)
10344 || !validate_arg (s2, POINTER_TYPE))
10345 return NULL_TREE;
10347 if (!check_nul_terminated_array (expr, s1)
10348 || !check_nul_terminated_array (expr, s2))
10349 return NULL_TREE;
10351 /* If the first argument is "", the result is zero. */
10352 const char *p1 = c_getstr (s1);
10353 if (p1 && *p1 == '\0')
10355 /* Evaluate and ignore argument s2 in case it has
10356 side-effects. */
10357 return omit_one_operand_loc (loc, size_type_node,
10358 size_zero_node, s2);
10361 /* If the second argument is "", return __builtin_strlen(s1). */
10362 const char *p2 = c_getstr (s2);
10363 if (p2 && *p2 == '\0')
10365 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10367 /* If the replacement _DECL isn't initialized, don't do the
10368 transformation. */
10369 if (!fn)
10370 return NULL_TREE;
10372 return build_call_expr_loc (loc, fn, 1, s1);
10374 return NULL_TREE;
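/* Editorial illustration (not part of builtins.cc): the strspn/strcspn folds
   implemented above, using GCC's builtin spellings.  */
static __SIZE_TYPE__
strspn_strcspn_fold_examples (const char *s)
{
  __SIZE_TYPE__ a = __builtin_strspn (s, "");    /* folds to 0           */
  __SIZE_TYPE__ b = __builtin_strspn ("", s);    /* folds to 0           */
  __SIZE_TYPE__ c = __builtin_strcspn ("", s);   /* folds to 0           */
  __SIZE_TYPE__ d = __builtin_strcspn (s, "");   /* folds to strlen (s)  */
  return a + b + c + d;
}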
10377 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10378 produced. False otherwise. This is done so that we don't output the error
10379 or warning twice or three times. */
10381 bool
10382 fold_builtin_next_arg (tree exp, bool va_start_p)
10384 tree fntype = TREE_TYPE (current_function_decl);
10385 int nargs = call_expr_nargs (exp);
10386 tree arg;
10387 /* There is a good chance the current input_location points inside the
10388 definition of the va_start macro (perhaps on the token for the
10389 builtin) in a system header, so warnings will not be emitted.
10390 Use the location in real source code. */
10391 location_t current_location =
10392 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10393 NULL);
10395 if (!stdarg_p (fntype))
10397 error ("%<va_start%> used in function with fixed arguments");
10398 return true;
10401 if (va_start_p)
10403 if (va_start_p && (nargs != 2))
10405 error ("wrong number of arguments to function %<va_start%>");
10406 return true;
10408 arg = CALL_EXPR_ARG (exp, 1);
10410 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10411 when we checked the arguments and if needed issued a warning. */
10412 else
10414 if (nargs == 0)
10416 /* Evidently an out of date version of <stdarg.h>; can't validate
10417 va_start's second argument, but can still work as intended. */
10418 warning_at (current_location,
10419 OPT_Wvarargs,
10420 "%<__builtin_next_arg%> called without an argument");
10421 return true;
10423 else if (nargs > 1)
10425 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10426 return true;
10428 arg = CALL_EXPR_ARG (exp, 0);
10431 if (TREE_CODE (arg) == SSA_NAME
10432 && SSA_NAME_VAR (arg))
10433 arg = SSA_NAME_VAR (arg);
10435 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10436 or __builtin_next_arg (0) the first time we see it, after checking
10437 the arguments and if needed issuing a warning. */
10438 if (!integer_zerop (arg))
10440 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10442 /* Strip off all nops for the sake of the comparison. This
10443 is not quite the same as STRIP_NOPS. It does more.
10444 We must also strip off INDIRECT_EXPR for C++ reference
10445 parameters. */
10446 while (CONVERT_EXPR_P (arg)
10447 || TREE_CODE (arg) == INDIRECT_REF)
10448 arg = TREE_OPERAND (arg, 0);
10449 if (arg != last_parm)
10451 /* FIXME: Sometimes with the tree optimizers we can end up with
10452 something that is not the last argument even though the user
10453 used the last argument. We just warn and treat the argument as
10454 if it were the last one, so wrong code may be generated because of
10455 it. */
10456 warning_at (current_location,
10457 OPT_Wvarargs,
10458 "second parameter of %<va_start%> not last named argument");
10461 /* Undefined by C99 7.15.1.4p4 (va_start):
10462 "If the parameter parmN is declared with the register storage
10463 class, with a function or array type, or with a type that is
10464 not compatible with the type that results after application of
10465 the default argument promotions, the behavior is undefined."
10467 else if (DECL_REGISTER (arg))
10469 warning_at (current_location,
10470 OPT_Wvarargs,
10471 "undefined behavior when second parameter of "
10472 "%<va_start%> is declared with %<register%> storage");
10475 /* We want to verify the second parameter just once before the tree
10476 optimizers are run and then avoid keeping it in the tree,
10477 as otherwise we could warn even for correct code like:
10478 void foo (int i, ...)
10479 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10480 if (va_start_p)
10481 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10482 else
10483 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10485 return false;
10489 /* Expand a call EXP to __builtin_object_size. */
10491 static rtx
10492 expand_builtin_object_size (tree exp)
10494 tree ost;
10495 int object_size_type;
10496 tree fndecl = get_callee_fndecl (exp);
10498 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10500 error ("first argument of %qD must be a pointer, second integer constant",
10501 fndecl);
10502 expand_builtin_trap ();
10503 return const0_rtx;
10506 ost = CALL_EXPR_ARG (exp, 1);
10507 STRIP_NOPS (ost);
10509 if (TREE_CODE (ost) != INTEGER_CST
10510 || tree_int_cst_sgn (ost) < 0
10511 || compare_tree_int (ost, 3) > 0)
10513 error ("last argument of %qD is not integer constant between 0 and 3",
10514 fndecl);
10515 expand_builtin_trap ();
10516 return const0_rtx;
10519 object_size_type = tree_to_shwi (ost);
10521 return object_size_type < 2 ? constm1_rtx : const0_rtx;
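/* Editorial illustration (not part of builtins.cc): the fallback values the
   expander above supplies when the object-size pass left a call unresolved:
   maximum-size types 0 and 1 yield (size_t) -1, minimum-size types 2 and 3
   yield 0.  */
static void
object_size_fallback_example (char *p)
{
  __SIZE_TYPE__ max0 = __builtin_object_size (p, 0);   /* unknown: (size_t) -1  */
  __SIZE_TYPE__ min2 = __builtin_object_size (p, 2);   /* unknown: 0            */
  (void) max0; (void) min2;
}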
10524 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10525 FCODE is the BUILT_IN_* to use.
10526 Return NULL_RTX if we failed; the caller should emit a normal call,
10527 otherwise try to get the result in TARGET, if convenient (and in
10528 mode MODE if that's convenient). */
10530 static rtx
10531 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10532 enum built_in_function fcode)
10534 if (!validate_arglist (exp,
10535 POINTER_TYPE,
10536 fcode == BUILT_IN_MEMSET_CHK
10537 ? INTEGER_TYPE : POINTER_TYPE,
10538 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10539 return NULL_RTX;
10541 tree dest = CALL_EXPR_ARG (exp, 0);
10542 tree src = CALL_EXPR_ARG (exp, 1);
10543 tree len = CALL_EXPR_ARG (exp, 2);
10544 tree size = CALL_EXPR_ARG (exp, 3);
10546 /* FIXME: Set access mode to write only for memset et al. */
10547 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10548 /*srcstr=*/NULL_TREE, size, access_read_write);
10550 if (!tree_fits_uhwi_p (size))
10551 return NULL_RTX;
10553 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10555 /* Avoid transforming the checking call to an ordinary one when
10556 an overflow has been detected or when the call couldn't be
10557 validated because the size is not constant. */
10558 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10559 return NULL_RTX;
10561 tree fn = NULL_TREE;
10562 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10563 mem{cpy,pcpy,move,set} is available. */
10564 switch (fcode)
10566 case BUILT_IN_MEMCPY_CHK:
10567 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10568 break;
10569 case BUILT_IN_MEMPCPY_CHK:
10570 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10571 break;
10572 case BUILT_IN_MEMMOVE_CHK:
10573 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10574 break;
10575 case BUILT_IN_MEMSET_CHK:
10576 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10577 break;
10578 default:
10579 break;
10582 if (! fn)
10583 return NULL_RTX;
10585 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10586 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10587 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10588 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10590 else if (fcode == BUILT_IN_MEMSET_CHK)
10591 return NULL_RTX;
10592 else
10594 unsigned int dest_align = get_pointer_alignment (dest);
10596 /* If DEST is not a pointer type, call the normal function. */
10597 if (dest_align == 0)
10598 return NULL_RTX;
10600 /* If SRC and DEST are the same (and not volatile), do nothing. */
10601 if (operand_equal_p (src, dest, 0))
10603 tree expr;
10605 if (fcode != BUILT_IN_MEMPCPY_CHK)
10607 /* Evaluate and ignore LEN in case it has side-effects. */
10608 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10609 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10612 expr = fold_build_pointer_plus (dest, len);
10613 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10616 /* __memmove_chk special case. */
10617 if (fcode == BUILT_IN_MEMMOVE_CHK)
10619 unsigned int src_align = get_pointer_alignment (src);
10621 if (src_align == 0)
10622 return NULL_RTX;
10624 /* If src is categorized for a readonly section we can use
10625 normal __memcpy_chk. */
10626 if (readonly_data_expr (src))
10628 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10629 if (!fn)
10630 return NULL_RTX;
10631 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10632 dest, src, len, size);
10633 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10634 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10635 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10638 return NULL_RTX;
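/* Illustrative example (not part of GCC): the kind of call expanded above.
   With _FORTIFY_SOURCE, library headers wrap memcpy in a checking builtin
   roughly like this (a sketch, not the exact glibc macro):

     void f (const char *src)
     {
       char buf[16];
       __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));
     }

   Both the length (8) and the object size (16) are compile-time constants
   and 8 <= 16, so the code above replaces the checking call with a plain
   memcpy.  If the length exceeded the object size, NULL_RTX would be
   returned and an ordinary call to __memcpy_chk emitted instead, leaving
   the check to the runtime.  */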
10642 /* Emit warning if a buffer overflow is detected at compile time. */
10644 static void
10645 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10647 /* The source string. */
10648 tree srcstr = NULL_TREE;
10649 /* The size of the destination object returned by __builtin_object_size. */
10650 tree objsize = NULL_TREE;
10651 /* The string being appended to (as in __strcat_chk),
10652 or null if the call doesn't concatenate. */
10653 tree catstr = NULL_TREE;
10654 /* The maximum length of the source sequence in a bounded operation
10655 (such as __strncat_chk) or null if the operation isn't bounded
10656 (such as __strcat_chk). */
10657 tree maxread = NULL_TREE;
10658 /* The exact size of the access (such as in __strncpy_chk). */
10659 tree size = NULL_TREE;
10660 /* The mode of the access checked by the function. Except for snprintf,
10661 both writing and reading are checked. */
10662 access_mode mode = access_read_write;
10664 switch (fcode)
10666 case BUILT_IN_STRCPY_CHK:
10667 case BUILT_IN_STPCPY_CHK:
10668 srcstr = CALL_EXPR_ARG (exp, 1);
10669 objsize = CALL_EXPR_ARG (exp, 2);
10670 break;
10672 case BUILT_IN_STRCAT_CHK:
10673 /* For __strcat_chk the warning will be emitted only if overflowing
10674 by at least strlen (dest) + 1 bytes. */
10675 catstr = CALL_EXPR_ARG (exp, 0);
10676 srcstr = CALL_EXPR_ARG (exp, 1);
10677 objsize = CALL_EXPR_ARG (exp, 2);
10678 break;
10680 case BUILT_IN_STRNCAT_CHK:
10681 catstr = CALL_EXPR_ARG (exp, 0);
10682 srcstr = CALL_EXPR_ARG (exp, 1);
10683 maxread = CALL_EXPR_ARG (exp, 2);
10684 objsize = CALL_EXPR_ARG (exp, 3);
10685 break;
10687 case BUILT_IN_STRNCPY_CHK:
10688 case BUILT_IN_STPNCPY_CHK:
10689 srcstr = CALL_EXPR_ARG (exp, 1);
10690 size = CALL_EXPR_ARG (exp, 2);
10691 objsize = CALL_EXPR_ARG (exp, 3);
10692 break;
10694 case BUILT_IN_SNPRINTF_CHK:
10695 case BUILT_IN_VSNPRINTF_CHK:
10696 maxread = CALL_EXPR_ARG (exp, 1);
10697 objsize = CALL_EXPR_ARG (exp, 3);
10698 /* The only checked access is the write to the destination. */
10699 mode = access_write_only;
10700 break;
10701 default:
10702 gcc_unreachable ();
10705 if (catstr && maxread)
10707 /* Check __strncat_chk. There is no way to determine the length
10708 of the string to which the source string is being appended so
10709 just warn when the length of the source string is not known. */
10710 check_strncat_sizes (exp, objsize);
10711 return;
10714 check_access (exp, size, maxread, srcstr, objsize, mode);
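/* Illustrative example (not part of GCC): code for which the checks above
   emit a compile-time warning (wording approximate), sketched with the
   fortified builtin spelled out explicitly:

     char d[4];
     __builtin___strcpy_chk (d, "too long", __builtin_object_size (d, 1));
     // warning: '__builtin___strcpy_chk' writing 9 bytes into a region
     // of size 4 [-Wstringop-overflow=]

   The destination size comes from the __builtin_object_size result passed
   as the last argument; the source length is 8 characters plus the
   terminating nul.  */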
10717 /* Emit warning if a buffer overflow is detected at compile time
10718 in __sprintf_chk/__vsprintf_chk calls. */
10720 static void
10721 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10723 tree size, len, fmt;
10724 const char *fmt_str;
10725 int nargs = call_expr_nargs (exp);
10727 /* Verify the required arguments in the original call. */
10729 if (nargs < 4)
10730 return;
10731 size = CALL_EXPR_ARG (exp, 2);
10732 fmt = CALL_EXPR_ARG (exp, 3);
10734 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10735 return;
10737 /* Check whether the format is a literal string constant. */
10738 fmt_str = c_getstr (fmt);
10739 if (fmt_str == NULL)
10740 return;
10742 if (!init_target_chars ())
10743 return;
10745 /* If the format doesn't contain % args or %%, we know its size. */
10746 if (strchr (fmt_str, target_percent) == 0)
10747 len = build_int_cstu (size_type_node, strlen (fmt_str));
10748 /* If the format is "%s" and first ... argument is a string literal,
10749 we know it too. */
10750 else if (fcode == BUILT_IN_SPRINTF_CHK
10751 && strcmp (fmt_str, target_percent_s) == 0)
10753 tree arg;
10755 if (nargs < 5)
10756 return;
10757 arg = CALL_EXPR_ARG (exp, 4);
10758 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10759 return;
10761 len = c_strlen (arg, 1);
10762 if (!len || ! tree_fits_uhwi_p (len))
10763 return;
10765 else
10766 return;
10768 /* Add one for the terminating nul. */
10769 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10771 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10772 access_write_only);
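/* Illustrative example (not part of GCC): the two format cases analyzed
   above, sketched with the fortified builtins spelled out explicitly:

     char d[4];
     // No '%' in the format: the output is strlen ("hello") + 1 = 6 bytes,
     // which overflows the 4-byte destination, so a warning is emitted.
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1), "hello");

     // Format "%s" with a string-literal argument: the length is known
     // and checked the same way.
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1), "%s", "hello");

   Formats containing any other directive are not analyzed here.  */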
10775 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10776 if possible. */
10778 static tree
10779 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
10781 tree bytes;
10782 int object_size_type;
10784 if (!validate_arg (ptr, POINTER_TYPE)
10785 || !validate_arg (ost, INTEGER_TYPE))
10786 return NULL_TREE;
10788 STRIP_NOPS (ost);
10790 if (TREE_CODE (ost) != INTEGER_CST
10791 || tree_int_cst_sgn (ost) < 0
10792 || compare_tree_int (ost, 3) > 0)
10793 return NULL_TREE;
10795 object_size_type = tree_to_shwi (ost);
10797 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10798 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10799 and (size_t) 0 for types 2 and 3. */
10800 if (TREE_SIDE_EFFECTS (ptr))
10801 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10803 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
10804 object_size_type |= OST_DYNAMIC;
10806 if (TREE_CODE (ptr) == ADDR_EXPR)
10808 compute_builtin_object_size (ptr, object_size_type, &bytes);
10809 if ((object_size_type & OST_DYNAMIC)
10810 || int_fits_type_p (bytes, size_type_node))
10811 return fold_convert (size_type_node, bytes);
10813 else if (TREE_CODE (ptr) == SSA_NAME)
10815 /* If object size is not known yet, delay folding until
10816 later. Maybe subsequent passes will help determine
10817 it. */
10818 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10819 && ((object_size_type & OST_DYNAMIC)
10820 || int_fits_type_p (bytes, size_type_node)))
10821 return fold_convert (size_type_node, bytes);
10824 return NULL_TREE;
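/* Illustrative example (not part of GCC): the side-effects rule applied
   above.  __builtin_object_size never evaluates its pointer argument, so a
   call whose argument has side effects folds straight to the "unknown"
   result:

     extern char *next (void);
     size_t a = __builtin_object_size (next (), 0);   // folds to (size_t) -1
     size_t b = __builtin_object_size (next (), 2);   // folds to 0
     // next () is not called in either case.  */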
10827 /* Builtins with folding operations that operate on "..." arguments
10828 need special handling; we need to store the arguments in a convenient
10829 data structure before attempting any folding. Fortunately there are
10830 only a few builtins that fall into this category. FNDECL is the
10831 function, EXP is the CALL_EXPR for the call. */
10833 static tree
10834 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10836 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10837 tree ret = NULL_TREE;
10839 switch (fcode)
10841 case BUILT_IN_FPCLASSIFY:
10842 ret = fold_builtin_fpclassify (loc, args, nargs);
10843 break;
10845 default:
10846 break;
10848 if (ret)
10850 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10851 SET_EXPR_LOCATION (ret, loc);
10852 suppress_warning (ret);
10853 return ret;
10855 return NULL_TREE;
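/* Illustrative example (not part of GCC): __builtin_fpclassify is the main
   "..." builtin folded above.  Its first five arguments are the values to
   return for each class and the sixth is the value to classify, matching
   <math.h>'s fpclassify macro:

     #include <math.h>

     #define my_fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, (x))

     int k = my_fpclassify (1.0);   // folds to the FP_NORMAL value  */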
10858 /* Initialize format string characters in the target charset. */
10860 bool
10861 init_target_chars (void)
10863 static bool init;
10864 if (!init)
10866 target_newline = lang_hooks.to_target_charset ('\n');
10867 target_percent = lang_hooks.to_target_charset ('%');
10868 target_c = lang_hooks.to_target_charset ('c');
10869 target_s = lang_hooks.to_target_charset ('s');
10870 if (target_newline == 0 || target_percent == 0 || target_c == 0
10871 || target_s == 0)
10872 return false;
10874 target_percent_c[0] = target_percent;
10875 target_percent_c[1] = target_c;
10876 target_percent_c[2] = '\0';
10878 target_percent_s[0] = target_percent;
10879 target_percent_s[1] = target_s;
10880 target_percent_s[2] = '\0';
10882 target_percent_s_newline[0] = target_percent;
10883 target_percent_s_newline[1] = target_s;
10884 target_percent_s_newline[2] = target_newline;
10885 target_percent_s_newline[3] = '\0';
10887 init = true;
10889 return true;
10892 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10893 and no overflow/underflow occurred. INEXACT is true if M was not
10894 exactly calculated. TYPE is the tree type for the result. This
10895 function assumes that the caller cleared the MPFR flags and then
10896 calculated M, so that any flag set before entering this function
10897 reflects that calculation. Return NULL_TREE if any checks fail. */
10899 static tree
10900 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10902 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10903 overflow/underflow occurred. If -frounding-math, proceed iff the
10904 result of calling FUNC was exact. */
10905 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10906 && (!flag_rounding_math || !inexact))
10908 REAL_VALUE_TYPE rr;
10910 real_from_mpfr (&rr, m, type, MPFR_RNDN);
10911 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
10912 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10913 but the mpfr_t is not, then we underflowed in the
10914 conversion. */
10915 if (real_isfinite (&rr)
10916 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10918 REAL_VALUE_TYPE rmode;
10920 real_convert (&rmode, TYPE_MODE (type), &rr);
10921 /* Proceed iff the specified mode can hold the value. */
10922 if (real_identical (&rmode, &rr))
10923 return build_real (type, rmode);
10926 return NULL_TREE;
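/* Illustrative sketch (not actual GCC code) of the caller-side protocol this
   helper assumes; see do_mpfr_remquo and do_mpfr_lgamma_r below for the real
   uses.  PREC, RA, RND and TYPE stand for values set up by the caller:

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, MPFR_RNDN);
     mpfr_clear_flags ();                  // reset overflow/underflow flags
     int inexact = mpfr_sin (m, m, rnd);   // any flag set now came from this
     tree result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);  */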
10929 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10930 number and no overflow/underflow occurred. INEXACT is true if M
10931 was not exactly calculated. TYPE is the tree type for the result.
10932 This function assumes that the caller cleared the MPFR flags and then
10933 calculated M, so that any flag set before entering this function
10934 reflects that calculation. Return NULL_TREE if any checks fail; if
10935 FORCE_CONVERT is true, bypass the checks. */
10937 static tree
10938 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10940 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10941 overflow/underflow occurred. If -frounding-math, proceed iff the
10942 result of calling FUNC was exact. */
10943 if (force_convert
10944 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10945 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10946 && (!flag_rounding_math || !inexact)))
10948 REAL_VALUE_TYPE re, im;
10950 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10951 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10952 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10953 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10954 but the mpfr_t is not, then we underflowed in the
10955 conversion. */
10956 if (force_convert
10957 || (real_isfinite (&re) && real_isfinite (&im)
10958 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10959 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10961 REAL_VALUE_TYPE re_mode, im_mode;
10963 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10964 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10965 /* Proceed iff the specified mode can hold the value. */
10966 if (force_convert
10967 || (real_identical (&re_mode, &re)
10968 && real_identical (&im_mode, &im)))
10969 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10970 build_real (TREE_TYPE (type), im_mode));
10973 return NULL_TREE;
10976 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10977 the pointer *(ARG_QUO) and return the result. The type is taken
10978 from the type of ARG0 and is used for setting the precision of the
10979 calculation and results. */
10981 static tree
10982 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10984 tree const type = TREE_TYPE (arg0);
10985 tree result = NULL_TREE;
10987 STRIP_NOPS (arg0);
10988 STRIP_NOPS (arg1);
10990 /* To proceed, MPFR must exactly represent the target floating point
10991 format, which only happens when the target base equals two. */
10992 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10993 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10994 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10996 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10997 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10999 if (real_isfinite (ra0) && real_isfinite (ra1))
11001 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11002 const int prec = fmt->p;
11003 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11004 tree result_rem;
11005 long integer_quo;
11006 mpfr_t m0, m1;
11008 mpfr_inits2 (prec, m0, m1, NULL);
11009 mpfr_from_real (m0, ra0, MPFR_RNDN);
11010 mpfr_from_real (m1, ra1, MPFR_RNDN);
11011 mpfr_clear_flags ();
11012 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11013 /* Remquo is independent of the rounding mode, so pass
11014 inexact=0 to do_mpfr_ckconv(). */
11015 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11016 mpfr_clears (m0, m1, NULL);
11017 if (result_rem)
11019 /* MPFR calculates quo in the host's long so it may
11020 return more bits in quo than the target int can hold
11021 if sizeof(host long) > sizeof(target int). This can
11022 happen even for native compilers in LP64 mode. In
11023 these cases, reduce the quo value modulo the largest
11024 number that the target int can hold while leaving one
11025 bit for the sign. */
11026 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11027 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11029 /* Dereference the quo pointer argument. */
11030 arg_quo = build_fold_indirect_ref (arg_quo);
11031 /* Proceed iff a valid pointer type was passed in. */
11032 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11034 /* Set the value. */
11035 tree result_quo
11036 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11037 build_int_cst (TREE_TYPE (arg_quo),
11038 integer_quo));
11039 TREE_SIDE_EFFECTS (result_quo) = 1;
11040 /* Combine the quo assignment with the rem. */
11041 result = fold_build2 (COMPOUND_EXPR, type,
11042 result_quo, result_rem);
11043 suppress_warning (result, OPT_Wunused_value);
11044 result = non_lvalue (result);
11049 return result;
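/* Illustrative example (not part of GCC): the source-level folding this
   enables.  With constant arguments the call can be replaced by its
   compile-time result (a sketch):

     int q;
     double r = __builtin_remquo (10.0, 3.0, &q);
     // May fold to: q = 3 (low-order bits of the quotient), r = 1.0,
     // with no library call emitted.  */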
11052 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11053 resulting value as a tree with type TYPE. The mpfr precision is
11054 set to the precision of TYPE. We assume that this mpfr function
11055 returns zero if the result could be calculated exactly within the
11056 requested precision. In addition, the integer pointer represented
11057 by ARG_SG will be dereferenced and set to the appropriate signgam
11058 (-1,1) value. */
11060 static tree
11061 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11063 tree result = NULL_TREE;
11065 STRIP_NOPS (arg);
11067 /* To proceed, MPFR must exactly represent the target floating point
11068 format, which only happens when the target base equals two. Also
11069 verify ARG is a constant and that ARG_SG is an int pointer. */
11070 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11071 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11072 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11073 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11075 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11077 /* In addition to NaN and Inf, the argument cannot be zero or a
11078 negative integer. */
11079 if (real_isfinite (ra)
11080 && ra->cl != rvc_zero
11081 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11083 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11084 const int prec = fmt->p;
11085 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11086 int inexact, sg;
11087 mpfr_t m;
11088 tree result_lg;
11090 mpfr_init2 (m, prec);
11091 mpfr_from_real (m, ra, MPFR_RNDN);
11092 mpfr_clear_flags ();
11093 inexact = mpfr_lgamma (m, &sg, m, rnd);
11094 result_lg = do_mpfr_ckconv (m, type, inexact);
11095 mpfr_clear (m);
11096 if (result_lg)
11098 tree result_sg;
11100 /* Dereference the arg_sg pointer argument. */
11101 arg_sg = build_fold_indirect_ref (arg_sg);
11102 /* Assign the signgam value into *arg_sg. */
11103 result_sg = fold_build2 (MODIFY_EXPR,
11104 TREE_TYPE (arg_sg), arg_sg,
11105 build_int_cst (TREE_TYPE (arg_sg), sg));
11106 TREE_SIDE_EFFECTS (result_sg) = 1;
11107 /* Combine the signgam assignment with the lgamma result. */
11108 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11109 result_sg, result_lg));
11114 return result;
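/* Illustrative example (not part of GCC): the corresponding source-level
   folding (a sketch):

     int sg;
     double v = __builtin_lgamma_r (3.0, &sg);
     // May fold to: sg = 1 and v = log (2.0) = 0.693...,
     // since Gamma (3) = 2 and is positive.  */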
11117 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11118 mpc function FUNC on it and return the resulting value as a tree
11119 with type TYPE. The mpfr precision is set to the precision of
11120 TYPE. We assume that function FUNC returns zero if the result
11121 could be calculated exactly within the requested precision. If
11122 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11123 in the arguments and/or results. */
11125 tree
11126 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11127 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11129 tree result = NULL_TREE;
11131 STRIP_NOPS (arg0);
11132 STRIP_NOPS (arg1);
11134 /* To proceed, MPFR must exactly represent the target floating point
11135 format, which only happens when the target base equals two. */
11136 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11137 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11138 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11139 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11140 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11142 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11143 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11144 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11145 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11147 if (do_nonfinite
11148 || (real_isfinite (re0) && real_isfinite (im0)
11149 && real_isfinite (re1) && real_isfinite (im1)))
11151 const struct real_format *const fmt =
11152 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11153 const int prec = fmt->p;
11154 const mpfr_rnd_t rnd = fmt->round_towards_zero
11155 ? MPFR_RNDZ : MPFR_RNDN;
11156 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11157 int inexact;
11158 mpc_t m0, m1;
11160 mpc_init2 (m0, prec);
11161 mpc_init2 (m1, prec);
11162 mpfr_from_real (mpc_realref (m0), re0, rnd);
11163 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11164 mpfr_from_real (mpc_realref (m1), re1, rnd);
11165 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11166 mpfr_clear_flags ();
11167 inexact = func (m0, m0, m1, crnd);
11168 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11169 mpc_clear (m0);
11170 mpc_clear (m1);
11174 return result;
11177 /* A wrapper function for builtin folding that prevents warnings for
11178 "statement without effect" and the like, caused by removing the
11179 call node before the warning is generated. */
11181 tree
11182 fold_call_stmt (gcall *stmt, bool ignore)
11184 tree ret = NULL_TREE;
11185 tree fndecl = gimple_call_fndecl (stmt);
11186 location_t loc = gimple_location (stmt);
11187 if (fndecl && fndecl_built_in_p (fndecl)
11188 && !gimple_call_va_arg_pack_p (stmt))
11190 int nargs = gimple_call_num_args (stmt);
11191 tree *args = (nargs > 0
11192 ? gimple_call_arg_ptr (stmt, 0)
11193 : &error_mark_node);
11195 if (avoid_folding_inline_builtin (fndecl))
11196 return NULL_TREE;
11197 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11199 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11201 else
11203 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11204 if (ret)
11206 /* Propagate location information from original call to
11207 expansion of builtin. Otherwise things like
11208 maybe_emit_chk_warning, that operate on the expansion
11209 of a builtin, will use the wrong location information. */
11210 if (gimple_has_location (stmt))
11212 tree realret = ret;
11213 if (TREE_CODE (ret) == NOP_EXPR)
11214 realret = TREE_OPERAND (ret, 0);
11215 if (CAN_HAVE_LOCATION_P (realret)
11216 && !EXPR_HAS_LOCATION (realret))
11217 SET_EXPR_LOCATION (realret, loc);
11218 return realret;
11220 return ret;
11224 return NULL_TREE;
11227 /* Look up the function in builtin_decl that corresponds to DECL
11228 and set ASMSPEC as its user assembler name. DECL must be a
11229 function decl that declares a builtin. */
11231 void
11232 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11234 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11235 && asmspec != 0);
11237 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11238 set_user_assembler_name (builtin, asmspec);
11240 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11241 && INT_TYPE_SIZE < BITS_PER_WORD)
11243 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11244 set_user_assembler_libfunc ("ffs", asmspec);
11245 set_optab_libfunc (ffs_optab, mode, "ffs");
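/* Illustrative example (not part of GCC): the user-level declaration that
   leads here.  Redeclaring a builtin with an asm name makes GCC emit calls
   to that symbol instead (a sketch):

     extern int ffs (int) __asm__ ("my_ffs");
     int f (int x) { return __builtin_ffs (x); }   // may call my_ffs

   For ffs on targets where int is narrower than a word, the code above also
   redirects the ffs optab libfunc so that RTL expansion uses the same
   symbol.  */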
11249 /* Return true if DECL is a builtin that expands to a constant or similarly
11250 simple code. */
11251 bool
11252 is_simple_builtin (tree decl)
11254 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11255 switch (DECL_FUNCTION_CODE (decl))
11257 /* Builtins that expand to constants. */
11258 case BUILT_IN_CONSTANT_P:
11259 case BUILT_IN_EXPECT:
11260 case BUILT_IN_OBJECT_SIZE:
11261 case BUILT_IN_UNREACHABLE:
11262 /* Simple register moves or loads from stack. */
11263 case BUILT_IN_ASSUME_ALIGNED:
11264 case BUILT_IN_RETURN_ADDRESS:
11265 case BUILT_IN_EXTRACT_RETURN_ADDR:
11266 case BUILT_IN_FROB_RETURN_ADDR:
11267 case BUILT_IN_RETURN:
11268 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11269 case BUILT_IN_FRAME_ADDRESS:
11270 case BUILT_IN_VA_END:
11271 case BUILT_IN_STACK_SAVE:
11272 case BUILT_IN_STACK_RESTORE:
11273 case BUILT_IN_DWARF_CFA:
11274 /* Exception state returns or moves registers around. */
11275 case BUILT_IN_EH_FILTER:
11276 case BUILT_IN_EH_POINTER:
11277 case BUILT_IN_EH_COPY_VALUES:
11278 return true;
11280 default:
11281 return false;
11284 return false;
11287 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11288 most probably expanded inline into reasonably simple code. This is a
11289 superset of is_simple_builtin. */
11290 bool
11291 is_inexpensive_builtin (tree decl)
11293 if (!decl)
11294 return false;
11295 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11296 return true;
11297 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11298 switch (DECL_FUNCTION_CODE (decl))
11300 case BUILT_IN_ABS:
11301 CASE_BUILT_IN_ALLOCA:
11302 case BUILT_IN_BSWAP16:
11303 case BUILT_IN_BSWAP32:
11304 case BUILT_IN_BSWAP64:
11305 case BUILT_IN_BSWAP128:
11306 case BUILT_IN_CLZ:
11307 case BUILT_IN_CLZIMAX:
11308 case BUILT_IN_CLZL:
11309 case BUILT_IN_CLZLL:
11310 case BUILT_IN_CTZ:
11311 case BUILT_IN_CTZIMAX:
11312 case BUILT_IN_CTZL:
11313 case BUILT_IN_CTZLL:
11314 case BUILT_IN_FFS:
11315 case BUILT_IN_FFSIMAX:
11316 case BUILT_IN_FFSL:
11317 case BUILT_IN_FFSLL:
11318 case BUILT_IN_IMAXABS:
11319 case BUILT_IN_FINITE:
11320 case BUILT_IN_FINITEF:
11321 case BUILT_IN_FINITEL:
11322 case BUILT_IN_FINITED32:
11323 case BUILT_IN_FINITED64:
11324 case BUILT_IN_FINITED128:
11325 case BUILT_IN_FPCLASSIFY:
11326 case BUILT_IN_ISFINITE:
11327 case BUILT_IN_ISINF_SIGN:
11328 case BUILT_IN_ISINF:
11329 case BUILT_IN_ISINFF:
11330 case BUILT_IN_ISINFL:
11331 case BUILT_IN_ISINFD32:
11332 case BUILT_IN_ISINFD64:
11333 case BUILT_IN_ISINFD128:
11334 case BUILT_IN_ISNAN:
11335 case BUILT_IN_ISNANF:
11336 case BUILT_IN_ISNANL:
11337 case BUILT_IN_ISNAND32:
11338 case BUILT_IN_ISNAND64:
11339 case BUILT_IN_ISNAND128:
11340 case BUILT_IN_ISNORMAL:
11341 case BUILT_IN_ISGREATER:
11342 case BUILT_IN_ISGREATEREQUAL:
11343 case BUILT_IN_ISLESS:
11344 case BUILT_IN_ISLESSEQUAL:
11345 case BUILT_IN_ISLESSGREATER:
11346 case BUILT_IN_ISUNORDERED:
11347 case BUILT_IN_VA_ARG_PACK:
11348 case BUILT_IN_VA_ARG_PACK_LEN:
11349 case BUILT_IN_VA_COPY:
11350 case BUILT_IN_TRAP:
11351 case BUILT_IN_UNREACHABLE_TRAP:
11352 case BUILT_IN_SAVEREGS:
11353 case BUILT_IN_POPCOUNTL:
11354 case BUILT_IN_POPCOUNTLL:
11355 case BUILT_IN_POPCOUNTIMAX:
11356 case BUILT_IN_POPCOUNT:
11357 case BUILT_IN_PARITYL:
11358 case BUILT_IN_PARITYLL:
11359 case BUILT_IN_PARITYIMAX:
11360 case BUILT_IN_PARITY:
11361 case BUILT_IN_LABS:
11362 case BUILT_IN_LLABS:
11363 case BUILT_IN_PREFETCH:
11364 case BUILT_IN_ACC_ON_DEVICE:
11365 return true;
11367 default:
11368 return is_simple_builtin (decl);
11371 return false;
11374 /* Return true if T is a constant and the value cast to a target char
11375 can be represented by a host char.
11376 Store the cast char constant in *P if so. */
11378 bool
11379 target_char_cst_p (tree t, char *p)
11381 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11382 return false;
11384 *p = (char)tree_to_uhwi (t);
11385 return true;
11388 /* Return true if the builtin DECL is implemented in a standard library.
11389 Otherwise return false, which doesn't guarantee that it is not (the list
11390 of builtins handled below may be incomplete). */
11392 bool
11393 builtin_with_linkage_p (tree decl)
11395 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11396 switch (DECL_FUNCTION_CODE (decl))
11398 CASE_FLT_FN (BUILT_IN_ACOS):
11399 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
11400 CASE_FLT_FN (BUILT_IN_ACOSH):
11401 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
11402 CASE_FLT_FN (BUILT_IN_ASIN):
11403 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
11404 CASE_FLT_FN (BUILT_IN_ASINH):
11405 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
11406 CASE_FLT_FN (BUILT_IN_ATAN):
11407 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
11408 CASE_FLT_FN (BUILT_IN_ATANH):
11409 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
11410 CASE_FLT_FN (BUILT_IN_ATAN2):
11411 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
11412 CASE_FLT_FN (BUILT_IN_CBRT):
11413 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
11414 CASE_FLT_FN (BUILT_IN_CEIL):
11415 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11416 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11417 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11418 CASE_FLT_FN (BUILT_IN_COS):
11419 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
11420 CASE_FLT_FN (BUILT_IN_COSH):
11421 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
11422 CASE_FLT_FN (BUILT_IN_ERF):
11423 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
11424 CASE_FLT_FN (BUILT_IN_ERFC):
11425 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
11426 CASE_FLT_FN (BUILT_IN_EXP):
11427 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
11428 CASE_FLT_FN (BUILT_IN_EXP2):
11429 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
11430 CASE_FLT_FN (BUILT_IN_EXPM1):
11431 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
11432 CASE_FLT_FN (BUILT_IN_FABS):
11433 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11434 CASE_FLT_FN (BUILT_IN_FDIM):
11435 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
11436 CASE_FLT_FN (BUILT_IN_FLOOR):
11437 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11438 CASE_FLT_FN (BUILT_IN_FMA):
11439 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11440 CASE_FLT_FN (BUILT_IN_FMAX):
11441 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11442 CASE_FLT_FN (BUILT_IN_FMIN):
11443 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11444 CASE_FLT_FN (BUILT_IN_FMOD):
11445 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
11446 CASE_FLT_FN (BUILT_IN_FREXP):
11447 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
11448 CASE_FLT_FN (BUILT_IN_HYPOT):
11449 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
11450 CASE_FLT_FN (BUILT_IN_ILOGB):
11451 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
11452 CASE_FLT_FN (BUILT_IN_LDEXP):
11453 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
11454 CASE_FLT_FN (BUILT_IN_LGAMMA):
11455 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
11456 CASE_FLT_FN (BUILT_IN_LLRINT):
11457 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
11458 CASE_FLT_FN (BUILT_IN_LLROUND):
11459 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
11460 CASE_FLT_FN (BUILT_IN_LOG):
11461 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
11462 CASE_FLT_FN (BUILT_IN_LOG10):
11463 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
11464 CASE_FLT_FN (BUILT_IN_LOG1P):
11465 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
11466 CASE_FLT_FN (BUILT_IN_LOG2):
11467 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
11468 CASE_FLT_FN (BUILT_IN_LOGB):
11469 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
11470 CASE_FLT_FN (BUILT_IN_LRINT):
11471 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
11472 CASE_FLT_FN (BUILT_IN_LROUND):
11473 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
11474 CASE_FLT_FN (BUILT_IN_MODF):
11475 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
11476 CASE_FLT_FN (BUILT_IN_NAN):
11477 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
11478 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11479 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11480 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11481 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
11482 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11483 CASE_FLT_FN (BUILT_IN_POW):
11484 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
11485 CASE_FLT_FN (BUILT_IN_REMAINDER):
11486 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
11487 CASE_FLT_FN (BUILT_IN_REMQUO):
11488 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
11489 CASE_FLT_FN (BUILT_IN_RINT):
11490 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11491 CASE_FLT_FN (BUILT_IN_ROUND):
11492 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11493 CASE_FLT_FN (BUILT_IN_SCALBLN):
11494 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
11495 CASE_FLT_FN (BUILT_IN_SCALBN):
11496 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
11497 CASE_FLT_FN (BUILT_IN_SIN):
11498 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
11499 CASE_FLT_FN (BUILT_IN_SINH):
11500 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
11501 CASE_FLT_FN (BUILT_IN_SINCOS):
11502 CASE_FLT_FN (BUILT_IN_SQRT):
11503 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11504 CASE_FLT_FN (BUILT_IN_TAN):
11505 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
11506 CASE_FLT_FN (BUILT_IN_TANH):
11507 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
11508 CASE_FLT_FN (BUILT_IN_TGAMMA):
11509 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
11510 CASE_FLT_FN (BUILT_IN_TRUNC):
11511 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11512 return true;
11514 case BUILT_IN_STPCPY:
11515 case BUILT_IN_STPNCPY:
11516 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11517 by libiberty's stpcpy.c for MinGW targets so we need to return true
11518 in order to be able to build libiberty in LTO mode for them. */
11519 return true;
11521 default:
11522 break;
11524 return false;
11527 /* Return true if OFFRNG is bounded to a subrange of offset values
11528 valid for the largest possible object. */
11530 bool
11531 access_ref::offset_bounded () const
11533 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11534 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11535 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
11538 /* If CALLEE has known side effects, fill in INFO and return true.
11539 See tree-ssa-structalias.cc:find_func_aliases
11540 for the list of builtins we might need to handle here. */
11542 attr_fnspec
11543 builtin_fnspec (tree callee)
11545 built_in_function code = DECL_FUNCTION_CODE (callee);
11547 switch (code)
11549 /* All the following functions read memory pointed to by
11550 their second argument and write memory pointed to by first
11551 argument.
11552 strcat/strncat additionally reads memory pointed to by the first
11553 argument. */
11554 case BUILT_IN_STRCAT:
11555 case BUILT_IN_STRCAT_CHK:
11556 return "1cW 1 ";
11557 case BUILT_IN_STRNCAT:
11558 case BUILT_IN_STRNCAT_CHK:
11559 return "1cW 13";
11560 case BUILT_IN_STRCPY:
11561 case BUILT_IN_STRCPY_CHK:
11562 return "1cO 1 ";
11563 case BUILT_IN_STPCPY:
11564 case BUILT_IN_STPCPY_CHK:
11565 return ".cO 1 ";
11566 case BUILT_IN_STRNCPY:
11567 case BUILT_IN_MEMCPY:
11568 case BUILT_IN_MEMMOVE:
11569 case BUILT_IN_TM_MEMCPY:
11570 case BUILT_IN_TM_MEMMOVE:
11571 case BUILT_IN_STRNCPY_CHK:
11572 case BUILT_IN_MEMCPY_CHK:
11573 case BUILT_IN_MEMMOVE_CHK:
11574 return "1cO313";
11575 case BUILT_IN_MEMPCPY:
11576 case BUILT_IN_MEMPCPY_CHK:
11577 return ".cO313";
11578 case BUILT_IN_STPNCPY:
11579 case BUILT_IN_STPNCPY_CHK:
11580 return ".cO313";
11581 case BUILT_IN_BCOPY:
11582 return ".c23O3";
11583 case BUILT_IN_BZERO:
11584 return ".cO2";
11585 case BUILT_IN_MEMCMP:
11586 case BUILT_IN_MEMCMP_EQ:
11587 case BUILT_IN_BCMP:
11588 case BUILT_IN_STRNCMP:
11589 case BUILT_IN_STRNCMP_EQ:
11590 case BUILT_IN_STRNCASECMP:
11591 return ".cR3R3";
11593 /* The following functions read memory pointed to by their
11594 first argument. */
11595 CASE_BUILT_IN_TM_LOAD (1):
11596 CASE_BUILT_IN_TM_LOAD (2):
11597 CASE_BUILT_IN_TM_LOAD (4):
11598 CASE_BUILT_IN_TM_LOAD (8):
11599 CASE_BUILT_IN_TM_LOAD (FLOAT):
11600 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11601 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11602 CASE_BUILT_IN_TM_LOAD (M64):
11603 CASE_BUILT_IN_TM_LOAD (M128):
11604 CASE_BUILT_IN_TM_LOAD (M256):
11605 case BUILT_IN_TM_LOG:
11606 case BUILT_IN_TM_LOG_1:
11607 case BUILT_IN_TM_LOG_2:
11608 case BUILT_IN_TM_LOG_4:
11609 case BUILT_IN_TM_LOG_8:
11610 case BUILT_IN_TM_LOG_FLOAT:
11611 case BUILT_IN_TM_LOG_DOUBLE:
11612 case BUILT_IN_TM_LOG_LDOUBLE:
11613 case BUILT_IN_TM_LOG_M64:
11614 case BUILT_IN_TM_LOG_M128:
11615 case BUILT_IN_TM_LOG_M256:
11616 return ".cR ";
11618 case BUILT_IN_INDEX:
11619 case BUILT_IN_RINDEX:
11620 case BUILT_IN_STRCHR:
11621 case BUILT_IN_STRLEN:
11622 case BUILT_IN_STRRCHR:
11623 return ".cR ";
11624 case BUILT_IN_STRNLEN:
11625 return ".cR2";
11627 /* These read memory pointed to by the first argument.
11628 Allocating memory does not have any side-effects apart from
11629 being the definition point for the pointer.
11630 Unix98 specifies that errno is set on allocation failure. */
11631 case BUILT_IN_STRDUP:
11632 return "mCR ";
11633 case BUILT_IN_STRNDUP:
11634 return "mCR2";
11635 /* Allocating memory does not have any side-effects apart from
11636 being the definition point for the pointer. */
11637 case BUILT_IN_MALLOC:
11638 case BUILT_IN_ALIGNED_ALLOC:
11639 case BUILT_IN_CALLOC:
11640 case BUILT_IN_GOMP_ALLOC:
11641 return "mC";
11642 CASE_BUILT_IN_ALLOCA:
11643 return "mc";
11644 /* These read memory pointed to by the first argument with size
11645 in the third argument. */
11646 case BUILT_IN_MEMCHR:
11647 return ".cR3";
11648 /* These read memory pointed to by the first and second arguments. */
11649 case BUILT_IN_STRSTR:
11650 case BUILT_IN_STRPBRK:
11651 case BUILT_IN_STRCASECMP:
11652 case BUILT_IN_STRCSPN:
11653 case BUILT_IN_STRSPN:
11654 case BUILT_IN_STRCMP:
11655 case BUILT_IN_STRCMP_EQ:
11656 return ".cR R ";
11657 /* Freeing memory kills the pointed-to memory. More importantly
11658 the call has to serve as a barrier for moving loads and stores
11659 across it. */
11660 case BUILT_IN_STACK_RESTORE:
11661 case BUILT_IN_FREE:
11662 case BUILT_IN_GOMP_FREE:
11663 return ".co ";
11664 case BUILT_IN_VA_END:
11665 return ".cO ";
11666 /* Realloc serves both as allocation point and deallocation point. */
11667 case BUILT_IN_REALLOC:
11668 return ".Cw ";
11669 case BUILT_IN_GAMMA_R:
11670 case BUILT_IN_GAMMAF_R:
11671 case BUILT_IN_GAMMAL_R:
11672 case BUILT_IN_LGAMMA_R:
11673 case BUILT_IN_LGAMMAF_R:
11674 case BUILT_IN_LGAMMAL_R:
11675 return ".C. Ot";
11676 case BUILT_IN_FREXP:
11677 case BUILT_IN_FREXPF:
11678 case BUILT_IN_FREXPL:
11679 case BUILT_IN_MODF:
11680 case BUILT_IN_MODFF:
11681 case BUILT_IN_MODFL:
11682 return ".c. Ot";
11683 case BUILT_IN_REMQUO:
11684 case BUILT_IN_REMQUOF:
11685 case BUILT_IN_REMQUOL:
11686 return ".c. . Ot";
11687 case BUILT_IN_SINCOS:
11688 case BUILT_IN_SINCOSF:
11689 case BUILT_IN_SINCOSL:
11690 return ".c. OtOt";
11691 case BUILT_IN_MEMSET:
11692 case BUILT_IN_MEMSET_CHK:
11693 case BUILT_IN_TM_MEMSET:
11694 return "1cO3";
11695 CASE_BUILT_IN_TM_STORE (1):
11696 CASE_BUILT_IN_TM_STORE (2):
11697 CASE_BUILT_IN_TM_STORE (4):
11698 CASE_BUILT_IN_TM_STORE (8):
11699 CASE_BUILT_IN_TM_STORE (FLOAT):
11700 CASE_BUILT_IN_TM_STORE (DOUBLE):
11701 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11702 CASE_BUILT_IN_TM_STORE (M64):
11703 CASE_BUILT_IN_TM_STORE (M128):
11704 CASE_BUILT_IN_TM_STORE (M256):
11705 return ".cO ";
11706 case BUILT_IN_STACK_SAVE:
11707 case BUILT_IN_RETURN:
11708 case BUILT_IN_EH_POINTER:
11709 case BUILT_IN_EH_FILTER:
11710 case BUILT_IN_UNWIND_RESUME:
11711 case BUILT_IN_CXA_END_CLEANUP:
11712 case BUILT_IN_EH_COPY_VALUES:
11713 case BUILT_IN_FRAME_ADDRESS:
11714 case BUILT_IN_APPLY_ARGS:
11715 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
11716 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
11717 case BUILT_IN_PREFETCH:
11718 case BUILT_IN_DWARF_CFA:
11719 case BUILT_IN_RETURN_ADDRESS:
11720 return ".c";
11721 case BUILT_IN_ASSUME_ALIGNED:
11722 return "1cX ";
11723 /* But posix_memalign stores a pointer into the memory pointed to
11724 by its first argument. */
11725 case BUILT_IN_POSIX_MEMALIGN:
11726 return ".cOt";
11728 default:
11729 return "";