1 /* Expand builtin functions.
2 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "gomp-constants.h"
75 #include "omp-general.h"
76 #include "tree-dfa.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
90 /* Define the names of the builtin function types and codes. */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
97 #include "builtins.def"
100 /* Set up an array of builtin_info_type; make sure each element decl is
101 initialized to NULL_TREE. */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
104 /* Non-zero if __builtin_constant_p should be folded right away. */
105 bool force_folding_builtin_constant_p;
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static rtx expand_builtin_classify_type (tree);
117 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
118 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
119 static rtx expand_builtin_interclass_mathfn (tree, rtx);
120 static rtx expand_builtin_sincos (tree);
121 static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
122 static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
123 optab);
124 static rtx expand_builtin_cexpi (tree, rtx);
125 static rtx expand_builtin_issignaling (tree, rtx);
126 static rtx expand_builtin_int_roundingfn (tree, rtx);
127 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
128 static rtx expand_builtin_next_arg (void);
129 static rtx expand_builtin_va_start (tree);
130 static rtx expand_builtin_va_end (tree);
131 static rtx expand_builtin_va_copy (tree);
132 static rtx inline_expand_builtin_bytecmp (tree, rtx);
133 static rtx expand_builtin_strcmp (tree, rtx);
134 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
135 static rtx expand_builtin_memcpy (tree, rtx);
136 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
137 rtx target, tree exp,
138 memop_ret retmode,
139 bool might_overlap);
140 static rtx expand_builtin_memmove (tree, rtx);
141 static rtx expand_builtin_mempcpy (tree, rtx);
142 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
143 static rtx expand_builtin_strcpy (tree, rtx);
144 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
145 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
146 static rtx expand_builtin_strncpy (tree, rtx);
147 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 static rtx expand_builtin_bzero (tree);
149 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_alloca (tree);
152 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 static rtx expand_builtin_frame_address (tree, tree);
154 static tree stabilize_va_list_loc (location_t, tree, int);
155 static rtx expand_builtin_expect (tree, rtx);
156 static rtx expand_builtin_expect_with_probability (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_classify_type (tree);
159 static tree fold_builtin_strlen (location_t, tree, tree, tree);
160 static tree fold_builtin_inf (location_t, tree, int);
161 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 static bool validate_arg (const_tree, enum tree_code code);
163 static rtx expand_builtin_fabs (tree, rtx, rtx);
164 static rtx expand_builtin_signbit (tree, rtx);
165 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 static tree fold_builtin_isascii (location_t, tree);
167 static tree fold_builtin_toascii (location_t, tree);
168 static tree fold_builtin_isdigit (location_t, tree);
169 static tree fold_builtin_fabs (location_t, tree, tree);
170 static tree fold_builtin_abs (location_t, tree, tree);
171 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 enum tree_code);
173 static tree fold_builtin_iseqsig (location_t, tree, tree);
174 static tree fold_builtin_varargs (location_t, tree, tree*, int);
176 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
177 static tree fold_builtin_strspn (location_t, tree, tree, tree);
178 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
180 static rtx expand_builtin_object_size (tree);
181 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
182 enum built_in_function);
183 static void maybe_emit_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
185 static tree fold_builtin_object_size (tree, tree, enum built_in_function);
187 unsigned HOST_WIDE_INT target_newline;
188 unsigned HOST_WIDE_INT target_percent;
189 static unsigned HOST_WIDE_INT target_c;
190 static unsigned HOST_WIDE_INT target_s;
191 char target_percent_c[3];
192 char target_percent_s[3];
193 char target_percent_s_newline[4];
194 static tree do_mpfr_remquo (tree, tree, tree);
195 static tree do_mpfr_lgamma_r (tree, tree, tree);
196 static void expand_builtin_sync_synchronize (void);
198 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
200 static bool
201 is_builtin_name (const char *name)
203 return (startswith (name, "__builtin_")
204 || startswith (name, "__sync_")
205 || startswith (name, "__atomic_"));
208 /* Return true if NODE should be considered for inline expansion regardless
209 of the optimization level. This means whenever a function is invoked with
210 its "internal" name, which normally contains the prefix "__builtin". */
212 bool
213 called_as_built_in (tree node)
215 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
216 we want the name used to call the function, not the name it
217 will have. */
218 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
219 return is_builtin_name (name);
222 /* Compute values M and N such that M divides (address of EXP - N) and such
223 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
224 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
225 *ALIGNP and any bit-offset to *BITPOSP.
227 Note that the address (and thus the alignment) computed here is based
228 on the address to which a symbol resolves, whereas DECL_ALIGN is based
229 on the address at which an object is actually located. These two
230 addresses are not always the same. For example, on ARM targets,
231 the address &foo of a Thumb function foo() has the lowest bit set,
232 whereas foo() itself starts on an even address.
234 If ADDR_P is true we are taking the address of the memory reference EXP
235 and thus cannot rely on the access taking place. */
237 bool
238 get_object_alignment_2 (tree exp, unsigned int *alignp,
239 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 poly_int64 bitsize, bitpos;
242 tree offset;
243 machine_mode mode;
244 int unsignedp, reversep, volatilep;
245 unsigned int align = BITS_PER_UNIT;
246 bool known_alignment = false;
248 /* Get the innermost object and the constant (bitpos) and possibly
249 variable (offset) offset of the access. */
250 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
251 &unsignedp, &reversep, &volatilep);
253 /* Extract alignment information from the innermost object and
254 possibly adjust bitpos and offset. */
255 if (TREE_CODE (exp) == FUNCTION_DECL)
257 /* Function addresses can encode extra information besides their
258 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
259 allows the low bit to be used as a virtual bit, we know
260 that the address itself must be at least 2-byte aligned. */
261 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
262 align = 2 * BITS_PER_UNIT;
264 else if (TREE_CODE (exp) == LABEL_DECL)
266 else if (TREE_CODE (exp) == CONST_DECL)
268 /* The alignment of a CONST_DECL is determined by its initializer. */
269 exp = DECL_INITIAL (exp);
270 align = TYPE_ALIGN (TREE_TYPE (exp));
271 if (CONSTANT_CLASS_P (exp))
272 align = targetm.constant_alignment (exp, align);
274 known_alignment = true;
276 else if (DECL_P (exp))
278 align = DECL_ALIGN (exp);
279 known_alignment = true;
281 else if (TREE_CODE (exp) == INDIRECT_REF
282 || TREE_CODE (exp) == MEM_REF
283 || TREE_CODE (exp) == TARGET_MEM_REF)
285 tree addr = TREE_OPERAND (exp, 0);
286 unsigned ptr_align;
287 unsigned HOST_WIDE_INT ptr_bitpos;
288 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290 /* If the address is explicitly aligned, handle that. */
291 if (TREE_CODE (addr) == BIT_AND_EXPR
292 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
295 ptr_bitmask *= BITS_PER_UNIT;
296 align = least_bit_hwi (ptr_bitmask);
297 addr = TREE_OPERAND (addr, 0);
300 known_alignment
301 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
302 align = MAX (ptr_align, align);
304 /* Re-apply explicit alignment to the bitpos. */
305 ptr_bitpos &= ptr_bitmask;
307 /* The alignment of the pointer operand in a TARGET_MEM_REF
308 has to take the variable offset parts into account. */
309 if (TREE_CODE (exp) == TARGET_MEM_REF)
311 if (TMR_INDEX (exp))
313 unsigned HOST_WIDE_INT step = 1;
314 if (TMR_STEP (exp))
315 step = TREE_INT_CST_LOW (TMR_STEP (exp));
316 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 if (TMR_INDEX2 (exp))
319 align = BITS_PER_UNIT;
320 known_alignment = false;
323 /* When EXP is an actual memory reference then we can use
324 TYPE_ALIGN of a pointer indirection to derive alignment.
325 Do so only if get_pointer_alignment_1 did not reveal absolute
326 alignment knowledge and if using that alignment would
327 improve the situation. */
328 unsigned int talign;
329 if (!addr_p && !known_alignment
330 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
331 && talign > align)
332 align = talign;
333 else
335 /* Else adjust bitpos accordingly. */
336 bitpos += ptr_bitpos;
337 if (TREE_CODE (exp) == MEM_REF
338 || TREE_CODE (exp) == TARGET_MEM_REF)
339 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
342 else if (TREE_CODE (exp) == STRING_CST)
344 /* STRING_CST are the only constant objects we allow to be not
345 wrapped inside a CONST_DECL. */
346 align = TYPE_ALIGN (TREE_TYPE (exp));
347 if (CONSTANT_CLASS_P (exp))
348 align = targetm.constant_alignment (exp, align);
350 known_alignment = true;
353 /* If there is a non-constant offset part extract the maximum
354 alignment that can prevail. */
355 if (offset)
357 unsigned int trailing_zeros = tree_ctz (offset);
358 if (trailing_zeros < HOST_BITS_PER_INT)
360 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
361 if (inner)
362 align = MIN (align, inner);
366 /* Account for the alignment of runtime coefficients, so that the constant
367 bitpos is guaranteed to be accurate. */
368 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
369 if (alt_align != 0 && alt_align < align)
371 align = alt_align;
372 known_alignment = false;
375 *alignp = align;
376 *bitposp = bitpos.coeffs[0] & (align - 1);
377 return known_alignment;
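/* Illustrative example (not part of the original source), assuming the
   target assigns a DECL_ALIGN of 32 bits to the variable below:

     struct S { char c; char d; } s;

   For the reference s.d, get_object_alignment_2 would set *ALIGNP to 32
   and *BITPOSP to 8 and return true: the address of s.d is known to lie
   8 bits past a 32-bit-aligned boundary, i.e. M = 4 bytes divides
   (&s.d - 1 byte).  */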
380 /* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
382 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
383 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385 bool
386 get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
389 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
390 with it. */
391 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
392 exp = TREE_OPERAND (exp, 0);
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
396 /* Return the alignment in bits of EXP, an object. */
398 unsigned int
399 get_object_alignment (tree exp)
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
404 get_object_alignment_1 (exp, &align, &bitpos);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
409 if (bitpos != 0)
410 align = least_bit_hwi (bitpos);
411 return align;
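/* Example of the reduction above, continuing the struct S illustration
   from get_object_alignment_2: with align == 32 and bitpos == 8,
   least_bit_hwi (8) == 8, so get_object_alignment reports only 8 bits
   (byte alignment), the largest alignment the address itself is
   guaranteed to have.  */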
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in alignp and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
419 If EXP is not a pointer, false is returned too. */
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
425 STRIP_NOPS (exp);
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
466 /* We cannot really tell whether this result is an approximation. */
467 return false;
469 else
471 *bitposp = 0;
472 *alignp = BITS_PER_UNIT;
473 return false;
476 else if (TREE_CODE (exp) == INTEGER_CST)
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
484 *bitposp = 0;
485 *alignp = BITS_PER_UNIT;
486 return false;
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
496 unsigned int
497 get_pointer_alignment (tree exp)
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
502 get_pointer_alignment_1 (exp, &align, &bitpos);
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
507 if (bitpos != 0)
508 align = least_bit_hwi (bitpos);
510 return align;
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517 unsigned
518 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522 unsigned n;
524 if (eltsize == 1)
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
534 else
536 for (n = 0; n < maxelts; n++)
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
543 return n;
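/* Illustrative examples (not part of the original source):

     string_length ("ab\0cd", 1, 5)      returns 2
     string_length ("a\0b\0\0\0", 2, 3)  returns 2

   In the second call each element is two bytes, so the loop compares
   the byte pairs "a\0", "b\0" and "\0\0" and stops at the all-zero
   pair.  */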
546 /* Compute the length of a null-terminated character string or wide
547 character string handling character sizes of 1, 2, and 4 bytes.
548 TREE_STRING_LENGTH is not the right way because it evaluates to
549 the size of the character array in bytes (as opposed to characters)
550 and because it can contain a zero byte in the middle.
552 ONLY_VALUE should be nonzero if the result is not going to be emitted
553 into the instruction stream and zero if it is going to be expanded.
554 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
555 is returned, otherwise NULL, since
556 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
557 evaluate the side-effects.
559 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
560 accesses. Note that this implies the result is not going to be emitted
561 into the instruction stream.
563 Additional information about the string accessed may be recorded
564 in DATA. For example, if ARG references an unterminated string,
565 then the declaration will be stored in the DECL field. If the
566 length of the unterminated string can be determined, it'll be
567 stored in the LEN field. Note this length could well be different
568 than what a C strlen call would return.
570 ELTSIZE is 1 for normal single byte character strings, and 2 or
571 4 for wide character strings. ELTSIZE is by default 1.
573 The value returned is of type `ssizetype'. */
575 tree
576 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
578 /* If we were not passed a DATA pointer, then get one to a local
579 structure. That avoids having to check DATA for NULL before
580 each time we want to use it. */
581 c_strlen_data local_strlen_data = { };
582 if (!data)
583 data = &local_strlen_data;
585 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
587 tree src = STRIP_NOPS (arg);
588 if (TREE_CODE (src) == COND_EXPR
589 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 tree len1, len2;
593 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
594 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
595 if (tree_int_cst_equal (len1, len2))
596 return len1;
599 if (TREE_CODE (src) == COMPOUND_EXPR
600 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
601 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
603 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
605 /* Offset from the beginning of the string in bytes. */
606 tree byteoff;
607 tree memsize;
608 tree decl;
609 src = string_constant (src, &byteoff, &memsize, &decl);
610 if (src == 0)
611 return NULL_TREE;
613 /* Determine the size of the string element. */
614 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
615 return NULL_TREE;
617 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
618 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
619 in case the latter is less than the size of the array, such as when
620 SRC refers to a short string literal used to initialize a large array.
621 In that case, the elements of the array after the terminating NUL are
622 all NUL. */
623 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
624 strelts = strelts / eltsize;
626 if (!tree_fits_uhwi_p (memsize))
627 return NULL_TREE;
629 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
631 /* PTR can point to the byte representation of any string type, including
632 char* and wchar_t*. */
633 const char *ptr = TREE_STRING_POINTER (src);
635 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
637 /* The code below works only for single byte character types. */
638 if (eltsize != 1)
639 return NULL_TREE;
641 /* If the string has an internal NUL character followed by any
642 non-NUL characters (e.g., "foo\0bar"), we can't compute
643 the offset to the following NUL if we don't know where to
644 start searching for it. */
645 unsigned len = string_length (ptr, eltsize, strelts);
647 /* Return when an embedded null character is found or none at all.
648 In the latter case, set the DECL/LEN field in the DATA structure
649 so that callers may examine them. */
650 if (len + 1 < strelts)
651 return NULL_TREE;
652 else if (len >= maxelts)
654 data->decl = decl;
655 data->off = byteoff;
656 data->minlen = ssize_int (len);
657 return NULL_TREE;
660 /* For empty strings the result should be zero. */
661 if (len == 0)
662 return ssize_int (0);
664 /* We don't know the starting offset, but we do know that the string
665 has no internal zero bytes. If the offset falls within the bounds
666 of the string subtract the offset from the length of the string,
667 and return that. Otherwise the length is zero. Take care to
668 use SAVE_EXPR in case the OFFSET has side-effects. */
669 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
670 : byteoff;
671 offsave = fold_convert_loc (loc, sizetype, offsave);
672 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
673 size_int (len));
674 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
675 offsave);
676 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
677 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
678 build_zero_cst (ssizetype));
681 /* Offset from the beginning of the string in elements. */
682 HOST_WIDE_INT eltoff;
684 /* We have a known offset into the string. Start searching there for
685 a null character if we can represent it as a single HOST_WIDE_INT. */
686 if (byteoff == 0)
687 eltoff = 0;
688 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
689 eltoff = -1;
690 else
691 eltoff = tree_to_uhwi (byteoff) / eltsize;
693 /* If the offset is known to be out of bounds, warn, and call strlen at
694 runtime. */
695 if (eltoff < 0 || eltoff >= maxelts)
697 /* Suppress multiple warnings for propagated constant strings. */
698 if (only_value != 2
699 && !warning_suppressed_p (arg, OPT_Warray_bounds_)
700 && warning_at (loc, OPT_Warray_bounds_,
701 "offset %qwi outside bounds of constant string",
702 eltoff))
704 if (decl)
705 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
706 suppress_warning (arg, OPT_Warray_bounds_);
708 return NULL_TREE;
711 /* If eltoff is larger than strelts but less than maxelts the
712 string length is zero, since the excess memory will be zero. */
713 if (eltoff > strelts)
714 return ssize_int (0);
716 /* Use strlen to search for the first zero byte. Since any strings
717 constructed with build_string will have nulls appended, we win even
718 if we get handed something like (char[4])"abcd".
720 Since ELTOFF is our starting index into the string, no further
721 calculation is needed. */
722 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
723 strelts - eltoff);
725 /* Don't know what to return if there was no zero termination.
726 Ideally this would turn into a gcc_checking_assert over time.
727 Set DECL/LEN so callers can examine them. */
728 if (len >= maxelts - eltoff)
730 data->decl = decl;
731 data->off = byteoff;
732 data->minlen = ssize_int (len);
733 return NULL_TREE;
736 return ssize_int (len);
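/* Illustrative behaviour (not part of the original source), writing the
   ARG trees as the C expressions they represent:

     c_strlen ("hello", 1)                      -> ssize_int (5)
     c_strlen ("foo\0bar", 1)                   -> ssize_int (3)
     c_strlen ("foo\0bar" + i, 1), i unknown    -> NULL_TREE

   The last case fails because of the embedded NUL: without knowing the
   starting offset the length cannot be computed, as handled above.  */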
739 /* Return a constant integer corresponding to target reading
740 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
741 NULL_TERMINATED_P, reading stops after the first '\0' character, all further ones
742 are assumed to be zero, otherwise it reads as many characters
743 as needed. */
746 c_readstr (const char *str, fixed_size_mode mode,
747 bool null_terminated_p/*=true*/)
749 auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
751 bytes.reserve (GET_MODE_SIZE (mode));
753 target_unit ch = 1;
754 for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
756 if (ch || !null_terminated_p)
757 ch = (unsigned char) str[i];
758 bytes.quick_push (ch);
761 return native_decode_rtx (mode, bytes, 0);
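/* Illustrative example (not part of the original source), assuming
   8-bit target bytes and a 32-bit SImode:

     c_readstr ("abcd", SImode)  encodes the bytes 0x61 0x62 0x63 0x64
     in target memory order, i.e. the constant 0x64636261 on a
     little-endian target and 0x61626364 on a big-endian one.

     c_readstr ("ab", SImode)    reads 'a', 'b' and then, because
     NULL_TERMINATED_P defaults to true, two zero bytes.  */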
764 /* Cast a target constant CST to target CHAR and if that value fits into
765 host char type, return zero and put that value into variable pointed to by
766 P. */
768 static int
769 target_char_cast (tree cst, char *p)
771 unsigned HOST_WIDE_INT val, hostval;
773 if (TREE_CODE (cst) != INTEGER_CST
774 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
775 return 1;
777 /* Do not care if it fits or not right here. */
778 val = TREE_INT_CST_LOW (cst);
780 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
781 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
783 hostval = val;
784 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
785 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
787 if (val != hostval)
788 return 1;
790 *p = hostval;
791 return 0;
794 /* Similar to save_expr, but assumes that arbitrary code is not executed
795 in between the multiple evaluations. In particular, we assume that a
796 non-addressable local variable will not be modified. */
798 static tree
799 builtin_save_expr (tree exp)
801 if (TREE_CODE (exp) == SSA_NAME
802 || (TREE_ADDRESSABLE (exp) == 0
803 && (TREE_CODE (exp) == PARM_DECL
804 || (VAR_P (exp) && !TREE_STATIC (exp)))))
805 return exp;
807 return save_expr (exp);
810 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
811 times to get the address of either a higher stack frame, or a return
812 address located within it (depending on FNDECL_CODE). */
814 static rtx
815 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
817 int i;
818 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
819 if (tem == NULL_RTX)
821 /* For a zero count with __builtin_return_address, we don't care what
822 frame address we return, because target-specific definitions will
823 override us. Therefore frame pointer elimination is OK, and using
824 the soft frame pointer is OK.
826 For a nonzero count, or a zero count with __builtin_frame_address,
827 we require a stable offset from the current frame pointer to the
828 previous one, so we must use the hard frame pointer, and
829 we must disable frame pointer elimination. */
830 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
831 tem = frame_pointer_rtx;
832 else
834 tem = hard_frame_pointer_rtx;
836 /* Tell reload not to eliminate the frame pointer. */
837 crtl->accesses_prior_frames = 1;
841 if (count > 0)
842 SETUP_FRAME_ADDRESSES ();
844 /* On the SPARC, the return address is not in the frame, it is in a
845 register. There is no way to access it off of the current frame
846 pointer, but it can be accessed off the previous frame pointer by
847 reading the value from the register window save area. */
848 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
849 count--;
851 /* Scan back COUNT frames to the specified frame. */
852 for (i = 0; i < count; i++)
854 /* Assume the dynamic chain pointer is in the word that the
855 frame address points to, unless otherwise specified. */
856 tem = DYNAMIC_CHAIN_ADDRESS (tem);
857 tem = memory_address (Pmode, tem);
858 tem = gen_frame_mem (Pmode, tem);
859 tem = copy_to_reg (tem);
862 /* For __builtin_frame_address, return what we've got. But, on
863 the SPARC for example, we may have to add a bias. */
864 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
865 return FRAME_ADDR_RTX (tem);
867 /* For __builtin_return_address, get the return address from that frame. */
868 #ifdef RETURN_ADDR_RTX
869 tem = RETURN_ADDR_RTX (count, tem);
870 #else
871 tem = memory_address (Pmode,
872 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
873 tem = gen_frame_mem (Pmode, tem);
874 #endif
875 return tem;
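/* Illustrative source-level usage (not part of the original source):

     void *pc    = __builtin_return_address (0);  // our own return address
     void *frame = __builtin_frame_address (1);   // the caller's frame

   A COUNT of 0 refers to the current frame; each additional level is
   one walk of the dynamic chain performed by the loop above.  */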
878 /* Alias set used for setjmp buffer. */
879 static alias_set_type setjmp_alias_set = -1;
881 /* Construct the leading half of a __builtin_setjmp call. Control will
882 return to RECEIVER_LABEL. This is also called directly by the SJLJ
883 exception handling code. */
885 void
886 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
888 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
889 rtx stack_save;
890 rtx mem;
892 if (setjmp_alias_set == -1)
893 setjmp_alias_set = new_alias_set ();
895 buf_addr = convert_memory_address (Pmode, buf_addr);
897 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
899 /* We store the frame pointer and the address of receiver_label in
900 the buffer and use the rest of it for the stack save area, which
901 is machine-dependent. */
903 mem = gen_rtx_MEM (Pmode, buf_addr);
904 set_mem_alias_set (mem, setjmp_alias_set);
905 emit_move_insn (mem, hard_frame_pointer_rtx);
907 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
908 GET_MODE_SIZE (Pmode))),
909 set_mem_alias_set (mem, setjmp_alias_set);
911 emit_move_insn (validize_mem (mem),
912 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
914 stack_save = gen_rtx_MEM (sa_mode,
915 plus_constant (Pmode, buf_addr,
916 2 * GET_MODE_SIZE (Pmode)));
917 set_mem_alias_set (stack_save, setjmp_alias_set);
918 emit_stack_save (SAVE_NONLOCAL, &stack_save);
920 /* If there is further processing to do, do it. */
921 if (targetm.have_builtin_setjmp_setup ())
922 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
924 /* We have a nonlocal label. */
925 cfun->has_nonlocal_label = 1;
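/* Illustrative layout of the buffer written above, in words of
   GET_MODE_SIZE (Pmode) bytes:

     word 0   hard frame pointer
     word 1   address of RECEIVER_LABEL
     word 2+  stack save area (SA_MODE, machine-dependent contents)

   This is why __builtin_setjmp requires a five-word buffer even though
   not every target uses all of it.  */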
928 /* Construct the trailing part of a __builtin_setjmp call. This is
929 also called directly by the SJLJ exception handling code.
930 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
932 void
933 expand_builtin_setjmp_receiver (rtx receiver_label)
935 rtx chain;
937 /* Mark the FP as used when we get here, so we have to make sure it's
938 marked as used by this function. */
939 emit_use (hard_frame_pointer_rtx);
941 /* Mark the static chain as clobbered here so life information
942 doesn't get messed up for it. */
943 chain = rtx_for_static_chain (current_function_decl, true);
944 if (chain && REG_P (chain))
945 emit_clobber (chain);
947 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
949 /* If the argument pointer can be eliminated in favor of the
950 frame pointer, we don't need to restore it. We assume here
951 that if such an elimination is present, it can always be used.
952 This is the case on all known machines; if we don't make this
953 assumption, we do unnecessary saving on many machines. */
954 size_t i;
955 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
957 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
958 if (elim_regs[i].from == ARG_POINTER_REGNUM
959 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
960 break;
962 if (i == ARRAY_SIZE (elim_regs))
964 /* Now restore our arg pointer from the address at which it
965 was saved in our stack frame. */
966 emit_move_insn (crtl->args.internal_arg_pointer,
967 copy_to_reg (get_arg_pointer_save_area ()));
971 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
972 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
973 else if (targetm.have_nonlocal_goto_receiver ())
974 emit_insn (targetm.gen_nonlocal_goto_receiver ());
975 else
976 { /* Nothing */ }
978 /* We must not allow the code we just generated to be reordered by
979 scheduling. Specifically, the update of the frame pointer must
980 happen immediately, not later. */
981 emit_insn (gen_blockage ());
984 /* __builtin_longjmp is passed a pointer to an array of five words (not
985 all will be used on all machines). It operates similarly to the C
986 library function of the same name, but is more efficient. Much of
987 the code below is copied from the handling of non-local gotos. */
989 static void
990 expand_builtin_longjmp (rtx buf_addr, rtx value)
992 rtx fp, lab, stack;
993 rtx_insn *insn, *last;
994 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
996 /* DRAP is needed for stack realign if longjmp is expanded to current
997 function */
998 if (SUPPORTS_STACK_ALIGNMENT)
999 crtl->need_drap = true;
1001 if (setjmp_alias_set == -1)
1002 setjmp_alias_set = new_alias_set ();
1004 buf_addr = convert_memory_address (Pmode, buf_addr);
1006 buf_addr = force_reg (Pmode, buf_addr);
1008 /* We require that the user must pass a second argument of 1, because
1009 that is what builtin_setjmp will return. */
1010 gcc_assert (value == const1_rtx);
1012 last = get_last_insn ();
1013 if (targetm.have_builtin_longjmp ())
1014 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1015 else
1017 fp = gen_rtx_MEM (Pmode, buf_addr);
1018 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1019 GET_MODE_SIZE (Pmode)));
1021 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1022 2 * GET_MODE_SIZE (Pmode)));
1023 set_mem_alias_set (fp, setjmp_alias_set);
1024 set_mem_alias_set (lab, setjmp_alias_set);
1025 set_mem_alias_set (stack, setjmp_alias_set);
1027 /* Pick up FP, label, and SP from the block and jump. This code is
1028 from expand_goto in stmt.cc; see there for detailed comments. */
1029 if (targetm.have_nonlocal_goto ())
1030 /* We have to pass a value to the nonlocal_goto pattern that will
1031 get copied into the static_chain pointer, but it does not matter
1032 what that value is, because builtin_setjmp does not use it. */
1033 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1034 else
1036 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1037 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1039 lab = copy_to_reg (lab);
1041 /* Restore the frame pointer and stack pointer. We must use a
1042 temporary since the setjmp buffer may be a local. */
1043 fp = copy_to_reg (fp);
1044 emit_stack_restore (SAVE_NONLOCAL, stack);
1046 /* Ensure the frame pointer move is not optimized. */
1047 emit_insn (gen_blockage ());
1048 emit_clobber (hard_frame_pointer_rtx);
1049 emit_clobber (frame_pointer_rtx);
1050 emit_move_insn (hard_frame_pointer_rtx, fp);
1052 emit_use (hard_frame_pointer_rtx);
1053 emit_use (stack_pointer_rtx);
1054 emit_indirect_jump (lab);
1058 /* Search backwards and mark the jump insn as a non-local goto.
1059 Note that this precludes the use of __builtin_longjmp to a
1060 __builtin_setjmp target in the same function. However, we've
1061 already cautioned the user that these functions are for
1062 internal exception handling use only. */
1063 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1065 gcc_assert (insn != last);
1067 if (JUMP_P (insn))
1069 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1070 break;
1072 else if (CALL_P (insn))
1073 break;
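/* Illustrative usage (not part of the original source); as cautioned
   above, these builtins are meant for internal exception-handling use
   only, and the second argument to __builtin_longjmp must be 1:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_protected_work ();        (normal path)
     else
       handle_unwind ();            (reached via __builtin_longjmp (buf, 1))

   do_protected_work and handle_unwind are hypothetical helpers.  */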
1077 static inline bool
1078 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1080 return (iter->i < iter->n);
1083 /* This function validates the types of a function call argument list
1084 against a specified list of tree_codes. If the last specifier is a 0,
1085 that represents an ellipsis, otherwise the last specifier must be a
1086 VOID_TYPE. */
1088 static bool
1089 validate_arglist (const_tree callexpr, ...)
1091 enum tree_code code;
1092 bool res = 0;
1093 va_list ap;
1094 const_call_expr_arg_iterator iter;
1095 const_tree arg;
1097 va_start (ap, callexpr);
1098 init_const_call_expr_arg_iterator (callexpr, &iter);
1100 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1101 tree fn = CALL_EXPR_FN (callexpr);
1102 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1104 for (unsigned argno = 1; ; ++argno)
1106 code = (enum tree_code) va_arg (ap, int);
1108 switch (code)
1110 case 0:
1111 /* This signifies an ellipsis; any further arguments are all ok. */
1112 res = true;
1113 goto end;
1114 case VOID_TYPE:
1115 /* This signifies an endlink; if no arguments remain, return
1116 true, otherwise return false. */
1117 res = !more_const_call_expr_args_p (&iter);
1118 goto end;
1119 case POINTER_TYPE:
1120 /* The actual argument must be nonnull when either the whole
1121 called function has been declared nonnull, or when the formal
1122 argument corresponding to the actual argument has been. */
1123 if (argmap
1124 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1126 arg = next_const_call_expr_arg (&iter);
1127 if (!validate_arg (arg, code) || integer_zerop (arg))
1128 goto end;
1129 break;
1131 /* FALLTHRU */
1132 default:
1133 /* If no parameters remain or the parameter's code does not
1134 match the specified code, return false. Otherwise continue
1135 checking any remaining arguments. */
1136 arg = next_const_call_expr_arg (&iter);
1137 if (!validate_arg (arg, code))
1138 goto end;
1139 break;
1143 /* We need gotos here since we can only have one VA_CLOSE in a
1144 function. */
1145 end: ;
1146 va_end (ap);
1148 BITMAP_FREE (argmap);
1150 return res;
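/* Example (matching the use in expand_builtin_nonlocal_goto below): the
   call

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts EXP only if it has exactly two pointer arguments, and rejects
   a literal null pointer for any argument covered by attribute
   nonnull.  */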
1153 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1154 and the address of the save area. */
1156 static rtx
1157 expand_builtin_nonlocal_goto (tree exp)
1159 tree t_label, t_save_area;
1160 rtx r_label, r_save_area, r_fp, r_sp;
1161 rtx_insn *insn;
1163 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1164 return NULL_RTX;
1166 t_label = CALL_EXPR_ARG (exp, 0);
1167 t_save_area = CALL_EXPR_ARG (exp, 1);
1169 r_label = expand_normal (t_label);
1170 r_label = convert_memory_address (Pmode, r_label);
1171 r_save_area = expand_normal (t_save_area);
1172 r_save_area = convert_memory_address (Pmode, r_save_area);
1173 /* Copy the address of the save location to a register just in case it was
1174 based on the frame pointer. */
1175 r_save_area = copy_to_reg (r_save_area);
1176 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1177 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1178 plus_constant (Pmode, r_save_area,
1179 GET_MODE_SIZE (Pmode)));
1181 crtl->has_nonlocal_goto = 1;
1183 /* ??? We no longer need to pass the static chain value, afaik. */
1184 if (targetm.have_nonlocal_goto ())
1185 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1186 else
1188 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1189 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1191 r_label = copy_to_reg (r_label);
1193 /* Restore the frame pointer and stack pointer. We must use a
1194 temporary since the setjmp buffer may be a local. */
1195 r_fp = copy_to_reg (r_fp);
1196 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1198 /* Ensure the frame pointer move is not optimized. */
1199 emit_insn (gen_blockage ());
1200 emit_clobber (hard_frame_pointer_rtx);
1201 emit_clobber (frame_pointer_rtx);
1202 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1204 /* USE of hard_frame_pointer_rtx added for consistency;
1205 not clear if really needed. */
1206 emit_use (hard_frame_pointer_rtx);
1207 emit_use (stack_pointer_rtx);
1209 /* If the architecture is using a GP register, we must
1210 conservatively assume that the target function makes use of it.
1211 The prologue of functions with nonlocal gotos must therefore
1212 initialize the GP register to the appropriate value, and we
1213 must then make sure that this value is live at the point
1214 of the jump. (Note that this doesn't necessarily apply
1215 to targets with a nonlocal_goto pattern; they are free
1216 to implement it in their own way. Note also that this is
1217 a no-op if the GP register is a global invariant.) */
1218 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1219 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1220 emit_use (pic_offset_table_rtx);
1222 emit_indirect_jump (r_label);
1225 /* Search backwards to the jump insn and mark it as a
1226 non-local goto. */
1227 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1229 if (JUMP_P (insn))
1231 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1232 break;
1234 else if (CALL_P (insn))
1235 break;
1238 return const0_rtx;
1241 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1242 (not all will be used on all machines) that was passed to __builtin_setjmp.
1243 It updates the stack pointer in that block to the current value. This is
1244 also called directly by the SJLJ exception handling code. */
1246 void
1247 expand_builtin_update_setjmp_buf (rtx buf_addr)
1249 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1250 buf_addr = convert_memory_address (Pmode, buf_addr);
1251 rtx stack_save
1252 = gen_rtx_MEM (sa_mode,
1253 memory_address
1254 (sa_mode,
1255 plus_constant (Pmode, buf_addr,
1256 2 * GET_MODE_SIZE (Pmode))));
1258 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1261 /* Expand a call to __builtin_prefetch. For a target that does not support
1262 data prefetch, evaluate the memory address argument in case it has side
1263 effects. */
1265 static void
1266 expand_builtin_prefetch (tree exp)
1268 tree arg0, arg1, arg2;
1269 int nargs;
1270 rtx op0, op1, op2;
1272 if (!validate_arglist (exp, POINTER_TYPE, 0))
1273 return;
1275 arg0 = CALL_EXPR_ARG (exp, 0);
1277 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1278 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1279 locality). */
1280 nargs = call_expr_nargs (exp);
1281 if (nargs > 1)
1282 arg1 = CALL_EXPR_ARG (exp, 1);
1283 else
1284 arg1 = integer_zero_node;
1285 if (nargs > 2)
1286 arg2 = CALL_EXPR_ARG (exp, 2);
1287 else
1288 arg2 = integer_three_node;
1290 /* Argument 0 is an address. */
1291 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1293 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1294 if (TREE_CODE (arg1) != INTEGER_CST)
1296 error ("second argument to %<__builtin_prefetch%> must be a constant");
1297 arg1 = integer_zero_node;
1299 op1 = expand_normal (arg1);
1300 /* Argument 1 must be either zero or one. */
1301 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1303 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1304 " using zero");
1305 op1 = const0_rtx;
1308 /* Argument 2 (locality) must be a compile-time constant int. */
1309 if (TREE_CODE (arg2) != INTEGER_CST)
1311 error ("third argument to %<__builtin_prefetch%> must be a constant");
1312 arg2 = integer_zero_node;
1314 op2 = expand_normal (arg2);
1315 /* Argument 2 must be 0, 1, 2, or 3. */
1316 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1318 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1319 op2 = const0_rtx;
1322 if (targetm.have_prefetch ())
1324 class expand_operand ops[3];
1326 create_address_operand (&ops[0], op0);
1327 create_integer_operand (&ops[1], INTVAL (op1));
1328 create_integer_operand (&ops[2], INTVAL (op2));
1329 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1330 return;
1333 /* Don't do anything with direct references to volatile memory, but
1334 generate code to handle other side effects. */
1335 if (!MEM_P (op0) && side_effects_p (op0))
1336 emit_insn (op0);
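/* Illustrative source-level usage (not part of the original source):

     __builtin_prefetch (p);          expands with rw = 0, locality = 3
     __builtin_prefetch (p, 1, 0);    write prefetch, no temporal locality

   Both optional arguments must be integer constants; invalid values are
   diagnosed and replaced by zero as done above.  */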
1339 /* Get a MEM rtx for expression EXP which is the address of an operand
1340 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1341 the maximum length of the block of memory that might be accessed or
1342 NULL if unknown. */
1345 get_memory_rtx (tree exp, tree len)
1347 tree orig_exp = exp, base;
1348 rtx addr, mem;
1350 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1351 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1352 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1353 exp = TREE_OPERAND (exp, 0);
1355 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1356 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1358 /* Get an expression we can use to find the attributes to assign to MEM.
1359 First remove any nops. */
1360 while (CONVERT_EXPR_P (exp)
1361 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1362 exp = TREE_OPERAND (exp, 0);
1364 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1365 (as builtin stringops may alias with anything). */
1366 exp = fold_build2 (MEM_REF,
1367 build_array_type (char_type_node,
1368 build_range_type (sizetype,
1369 size_one_node, len)),
1370 exp, build_int_cst (ptr_type_node, 0));
1372 /* If the MEM_REF has no acceptable address, try to get the base object
1373 from the original address we got, and build an all-aliasing
1374 unknown-sized access to that one. */
1375 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1376 set_mem_attributes (mem, exp, 0);
1377 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1378 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1379 0))))
1381 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1382 exp = build_fold_addr_expr (base);
1383 exp = fold_build2 (MEM_REF,
1384 build_array_type (char_type_node,
1385 build_range_type (sizetype,
1386 size_zero_node,
1387 NULL)),
1388 exp, build_int_cst (ptr_type_node, 0));
1389 set_mem_attributes (mem, exp, 0);
1390 /* Since we stripped parts make sure the offset is unknown and the
1391 alignment is computed from the original address. */
1392 clear_mem_offset (mem);
1393 set_mem_align (mem, align);
1395 set_mem_alias_set (mem, 0);
1396 return mem;
1399 /* Built-in functions to perform an untyped call and return. */
1401 #define apply_args_mode \
1402 (this_target_builtins->x_apply_args_mode)
1403 #define apply_result_mode \
1404 (this_target_builtins->x_apply_result_mode)
1406 /* Return the size required for the block returned by __builtin_apply_args,
1407 and initialize apply_args_mode. */
1409 static int
1410 apply_args_size (void)
1412 static int size = -1;
1413 int align;
1414 unsigned int regno;
1416 /* The values computed by this function never change. */
1417 if (size < 0)
1419 /* The first value is the incoming arg-pointer. */
1420 size = GET_MODE_SIZE (Pmode);
1422 /* The second value is the structure value address unless this is
1423 passed as an "invisible" first argument. */
1424 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1425 size += GET_MODE_SIZE (Pmode);
1427 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1428 if (FUNCTION_ARG_REGNO_P (regno))
1430 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1432 if (mode != VOIDmode)
1434 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1435 if (size % align != 0)
1436 size = CEIL (size, align) * align;
1437 size += GET_MODE_SIZE (mode);
1438 apply_args_mode[regno] = mode;
1440 else
1441 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1443 else
1444 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1446 return size;
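/* Example of the rounding above: if SIZE is currently 4 and the next
   argument register's mode requires 8-byte alignment, SIZE is first
   rounded up to CEIL (4, 8) * 8 == 8 and only then is
   GET_MODE_SIZE (mode) added.  */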
1449 /* Return the size required for the block returned by __builtin_apply,
1450 and initialize apply_result_mode. */
1452 static int
1453 apply_result_size (void)
1455 static int size = -1;
1456 int align, regno;
1458 /* The values computed by this function never change. */
1459 if (size < 0)
1461 size = 0;
1463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1464 if (targetm.calls.function_value_regno_p (regno))
1466 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1468 if (mode != VOIDmode)
1470 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1471 if (size % align != 0)
1472 size = CEIL (size, align) * align;
1473 size += GET_MODE_SIZE (mode);
1474 apply_result_mode[regno] = mode;
1476 else
1477 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1479 else
1480 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1482 /* Allow targets that use untyped_call and untyped_return to override
1483 the size so that machine-specific information can be stored here. */
1484 #ifdef APPLY_RESULT_SIZE
1485 size = APPLY_RESULT_SIZE;
1486 #endif
1488 return size;
1491 /* Create a vector describing the result block RESULT. If SAVEP is true,
1492 the result block is used to save the values; otherwise it is used to
1493 restore the values. */
1495 static rtx
1496 result_vector (int savep, rtx result)
1498 int regno, size, align, nelts;
1499 fixed_size_mode mode;
1500 rtx reg, mem;
1501 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1503 size = nelts = 0;
1504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1505 if ((mode = apply_result_mode[regno]) != VOIDmode)
1507 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1508 if (size % align != 0)
1509 size = CEIL (size, align) * align;
1510 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1511 mem = adjust_address (result, mode, size);
1512 savevec[nelts++] = (savep
1513 ? gen_rtx_SET (mem, reg)
1514 : gen_rtx_SET (reg, mem));
1515 size += GET_MODE_SIZE (mode);
1517 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1520 /* Save the state required to perform an untyped call with the same
1521 arguments as were passed to the current function. */
1523 static rtx
1524 expand_builtin_apply_args_1 (void)
1526 rtx registers, tem;
1527 int size, align, regno;
1528 fixed_size_mode mode;
1529 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1531 /* Create a block where the arg-pointer, structure value address,
1532 and argument registers can be saved. */
1533 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1535 /* Walk past the arg-pointer and structure value address. */
1536 size = GET_MODE_SIZE (Pmode);
1537 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1538 size += GET_MODE_SIZE (Pmode);
1540 /* Save each register used in calling a function to the block. */
1541 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1542 if ((mode = apply_args_mode[regno]) != VOIDmode)
1544 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1545 if (size % align != 0)
1546 size = CEIL (size, align) * align;
1548 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1550 emit_move_insn (adjust_address (registers, mode, size), tem);
1551 size += GET_MODE_SIZE (mode);
1554 /* Save the arg pointer to the block. */
1555 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1556 /* We need the pointer as the caller actually passed the arguments to us,
1557 not as we might have pretended they were passed. Make sure it's a valid
1558 operand, as emit_move_insn isn't expected to handle a PLUS. */
1559 if (STACK_GROWS_DOWNWARD)
1561 = force_operand (plus_constant (Pmode, tem,
1562 crtl->args.pretend_args_size),
1563 NULL_RTX);
1564 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1566 size = GET_MODE_SIZE (Pmode);
1568 /* Save the structure value address unless this is passed as an
1569 "invisible" first argument. */
1570 if (struct_incoming_value)
1571 emit_move_insn (adjust_address (registers, Pmode, size),
1572 copy_to_reg (struct_incoming_value));
1574 /* Return the address of the block. */
1575 return copy_addr_to_reg (XEXP (registers, 0));
1578 /* __builtin_apply_args returns block of memory allocated on
1579 the stack into which is stored the arg pointer, structure
1580 value address, static chain, and all the registers that might
1581 possibly be used in performing a function call. The code is
1582 moved to the start of the function so the incoming values are
1583 saved. */
1585 static rtx
1586 expand_builtin_apply_args (void)
1588 /* Don't do __builtin_apply_args more than once in a function.
1589 Save the result of the first call and reuse it. */
1590 if (apply_args_value != 0)
1591 return apply_args_value;
1593 /* When this function is called, it means that registers must be
1594 saved on entry to this function. So we migrate the
1595 call to the first insn of this function. */
1596 rtx temp;
1598 start_sequence ();
1599 temp = expand_builtin_apply_args_1 ();
1600 rtx_insn *seq = get_insns ();
1601 end_sequence ();
1603 apply_args_value = temp;
1605 /* Put the insns after the NOTE that starts the function.
1606 If this is inside a start_sequence, make the outer-level insn
1607 chain current, so the code is placed at the start of the
1608 function. If internal_arg_pointer is a non-virtual pseudo,
1609 it needs to be placed after the function that initializes
1610 that pseudo. */
1611 push_topmost_sequence ();
1612 if (REG_P (crtl->args.internal_arg_pointer)
1613 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1614 emit_insn_before (seq, parm_birth_insn);
1615 else
1616 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1617 pop_topmost_sequence ();
1618 return temp;
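/* Illustrative source-level usage of the untyped call builtins (not
   part of the original source); TARGET_FN and the 64-byte argument
   bound are hypothetical:

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   __builtin_apply_args captures the incoming registers saved by
   expand_builtin_apply_args_1 above; __builtin_apply replays them for
   TARGET_FN and saves its return registers; __builtin_return then
   returns those values from the current function.  */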
1622 /* Perform an untyped call and save the state required to perform an
1623 untyped return of whatever value was returned by the given function. */
1625 static rtx
1626 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1628 int size, align, regno;
1629 fixed_size_mode mode;
1630 rtx incoming_args, result, reg, dest, src;
1631 rtx_call_insn *call_insn;
1632 rtx old_stack_level = 0;
1633 rtx call_fusage = 0;
1634 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1636 arguments = convert_memory_address (Pmode, arguments);
1638 /* Create a block where the return registers can be saved. */
1639 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1641 /* Fetch the arg pointer from the ARGUMENTS block. */
1642 incoming_args = gen_reg_rtx (Pmode);
1643 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1644 if (!STACK_GROWS_DOWNWARD)
1645 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1646 incoming_args, 0, OPTAB_LIB_WIDEN);
1648 /* Push a new argument block and copy the arguments. Do not allow
1649 the (potential) memcpy call below to interfere with our stack
1650 manipulations. */
1651 do_pending_stack_adjust ();
1652 NO_DEFER_POP;
1654 /* Save the stack with nonlocal if available. */
1655 if (targetm.have_save_stack_nonlocal ())
1656 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1657 else
1658 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1660 /* Allocate a block of memory onto the stack and copy the memory
1661 arguments to the outgoing arguments address. We can pass TRUE
1662 as the 4th argument because we just saved the stack pointer
1663 and will restore it right after the call. */
1664 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1666 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1667 may have already set current_function_calls_alloca to true.
1668 current_function_calls_alloca won't be set if argsize is zero,
1669 so we have to guarantee need_drap is true here. */
1670 if (SUPPORTS_STACK_ALIGNMENT)
1671 crtl->need_drap = true;
1673 dest = virtual_outgoing_args_rtx;
1674 if (!STACK_GROWS_DOWNWARD)
1676 if (CONST_INT_P (argsize))
1677 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1678 else
1679 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1681 dest = gen_rtx_MEM (BLKmode, dest);
1682 set_mem_align (dest, PARM_BOUNDARY);
1683 src = gen_rtx_MEM (BLKmode, incoming_args);
1684 set_mem_align (src, PARM_BOUNDARY);
1685 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1687 /* Refer to the argument block. */
1688 apply_args_size ();
1689 arguments = gen_rtx_MEM (BLKmode, arguments);
1690 set_mem_align (arguments, PARM_BOUNDARY);
1692 /* Walk past the arg-pointer and structure value address. */
1693 size = GET_MODE_SIZE (Pmode);
1694 if (struct_value)
1695 size += GET_MODE_SIZE (Pmode);
1697 /* Restore each of the registers previously saved. Make USE insns
1698 for each of these registers for use in making the call. */
1699 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1700 if ((mode = apply_args_mode[regno]) != VOIDmode)
1702 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1703 if (size % align != 0)
1704 size = CEIL (size, align) * align;
1705 reg = gen_rtx_REG (mode, regno);
1706 emit_move_insn (reg, adjust_address (arguments, mode, size));
1707 use_reg (&call_fusage, reg);
1708 size += GET_MODE_SIZE (mode);
1711 /* Restore the structure value address unless this is passed as an
1712 "invisible" first argument. */
1713 size = GET_MODE_SIZE (Pmode);
1714 if (struct_value)
1716 rtx value = gen_reg_rtx (Pmode);
1717 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1718 emit_move_insn (struct_value, value);
1719 if (REG_P (struct_value))
1720 use_reg (&call_fusage, struct_value);
1723 /* All arguments and registers used for the call are set up by now! */
1724 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1726 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1727 and we don't want to load it into a register as an optimization,
1728 because prepare_call_address already did it if it should be done. */
1729 if (GET_CODE (function) != SYMBOL_REF)
1730 function = memory_address (FUNCTION_MODE, function);
1732 /* Generate the actual call instruction and save the return value. */
1733 if (targetm.have_untyped_call ())
1735 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1736 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1737 result_vector (1, result));
1738 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1739 if (CALL_P (insn))
1740 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1741 emit_insn (seq);
1743 else if (targetm.have_call_value ())
1745 rtx valreg = 0;
1747 /* Locate the unique return register. It is not possible to
1748 express a call that sets more than one return register using
1749 call_value; use untyped_call for that. In fact, untyped_call
1750 only needs to save the return registers in the given block. */
1751 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1752 if ((mode = apply_result_mode[regno]) != VOIDmode)
1754 gcc_assert (!valreg); /* have_untyped_call required. */
1756 valreg = gen_rtx_REG (mode, regno);
1759 emit_insn (targetm.gen_call_value (valreg,
1760 gen_rtx_MEM (FUNCTION_MODE, function),
1761 const0_rtx, NULL_RTX, const0_rtx));
1763 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1765 else
1766 gcc_unreachable ();
1768 /* Find the CALL insn we just emitted, and attach the register usage
1769 information. */
1770 call_insn = last_call_insn ();
1771 add_function_usage_to (call_insn, call_fusage);
1773 /* Restore the stack. */
1774 if (targetm.have_save_stack_nonlocal ())
1775 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1776 else
1777 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1778 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1780 OK_DEFER_POP;
1782 /* Return the address of the result block. */
1783 result = copy_addr_to_reg (XEXP (result, 0));
1784 return convert_memory_address (ptr_mode, result);
1787 /* Perform an untyped return. */
1789 static void
1790 expand_builtin_return (rtx result)
1792 int size, align, regno;
1793 fixed_size_mode mode;
1794 rtx reg;
1795 rtx_insn *call_fusage = 0;
1797 result = convert_memory_address (Pmode, result);
1799 apply_result_size ();
1800 result = gen_rtx_MEM (BLKmode, result);
1802 if (targetm.have_untyped_return ())
1804 rtx vector = result_vector (0, result);
1805 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1806 emit_barrier ();
1807 return;
1810 /* Restore the return value and note that each value is used. */
1811 size = 0;
1812 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1813 if ((mode = apply_result_mode[regno]) != VOIDmode)
1815 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1816 if (size % align != 0)
1817 size = CEIL (size, align) * align;
1818 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1819 emit_move_insn (reg, adjust_address (result, mode, size));
1821 push_to_sequence (call_fusage);
1822 emit_use (reg);
1823 call_fusage = get_insns ();
1824 end_sequence ();
1825 size += GET_MODE_SIZE (mode);
1828 /* Put the USE insns before the return. */
1829 emit_insn (call_fusage);
1831 /* Return whatever value was restored by jumping directly to the end
1832 of the function. */
1833 expand_naked_return ();
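/* Illustrative usage sketch, not part of builtins.cc: the three builtins
   expanded above are designed to be used together to forward an arbitrary
   call, for example

     void *forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_fn, args, 128);
       __builtin_return (ret);
     }

   where target_fn and the argument-block size of 128 bytes are hypothetical;
   a real caller must pass a size large enough for the stack arguments being
   forwarded.  */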
1836 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1839 type_to_class (tree type)
1841 switch (TREE_CODE (type))
1843 case VOID_TYPE: return void_type_class;
1844 case INTEGER_TYPE: return integer_type_class;
1845 case ENUMERAL_TYPE: return enumeral_type_class;
1846 case BOOLEAN_TYPE: return boolean_type_class;
1847 case POINTER_TYPE: return pointer_type_class;
1848 case REFERENCE_TYPE: return reference_type_class;
1849 case OFFSET_TYPE: return offset_type_class;
1850 case REAL_TYPE: return real_type_class;
1851 case COMPLEX_TYPE: return complex_type_class;
1852 case FUNCTION_TYPE: return function_type_class;
1853 case METHOD_TYPE: return method_type_class;
1854 case RECORD_TYPE: return record_type_class;
1855 case UNION_TYPE:
1856 case QUAL_UNION_TYPE: return union_type_class;
1857 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1858 ? string_type_class : array_type_class);
1859 case LANG_TYPE: return lang_type_class;
1860 case OPAQUE_TYPE: return opaque_type_class;
1861 case BITINT_TYPE: return bitint_type_class;
1862 case VECTOR_TYPE: return vector_type_class;
1863 default: return no_type_class;
1867 /* Expand a call EXP to __builtin_classify_type. */
1869 static rtx
1870 expand_builtin_classify_type (tree exp)
1872 if (call_expr_nargs (exp))
1873 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1874 return GEN_INT (no_type_class);
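/* Illustrative example, not part of builtins.cc: a call such as

     int c = __builtin_classify_type (1.5);

   folds to the real_type_class value of enum type_class (see typeclass.h),
   while an int argument yields integer_type_class; it is mainly useful to
   library code that needs to classify the arguments of variadic functions.  */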
1877 /* This helper macro, meant to be used in mathfn_built_in below, determines
1878 which among a set of builtin math functions is appropriate for a given type
1879 mode. The `F' (float) and `L' (long double) are automatically generated
1880 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1881 types, there are additional types that are considered with 'F32', 'F64',
1882 'F128', etc. suffixes. */
1883 #define CASE_MATHFN(MATHFN) \
1884 CASE_CFN_##MATHFN: \
1885 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1886 fcodel = BUILT_IN_##MATHFN##L ; break;
1887 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1888 types. */
1889 #define CASE_MATHFN_FLOATN(MATHFN) \
1890 CASE_CFN_##MATHFN: \
1891 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1892 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1893 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1894 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1895 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1896 break;
1897 /* Similar to above, but appends _R after any F/L suffix. */
1898 #define CASE_MATHFN_REENT(MATHFN) \
1899 case CFN_BUILT_IN_##MATHFN##_R: \
1900 case CFN_BUILT_IN_##MATHFN##F_R: \
1901 case CFN_BUILT_IN_##MATHFN##L_R: \
1902 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1903 fcodel = BUILT_IN_##MATHFN##L_R ; break;
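/* Illustrative expansion, not part of builtins.cc: CASE_MATHFN_FLOATN (SQRT)
   expands to roughly

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; fcodef16 = BUILT_IN_SQRTF16;
       fcodef32 = BUILT_IN_SQRTF32; fcodef64 = BUILT_IN_SQRTF64;
       fcodef128 = BUILT_IN_SQRTF128; fcodef32x = BUILT_IN_SQRTF32X;
       fcodef64x = BUILT_IN_SQRTF64X; fcodef128x = BUILT_IN_SQRTF128X;
       break;

   so a single case label records the whole family of suffixed codes.  */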
1905 /* Return a function equivalent to FN but operating on floating-point
1906 values of type TYPE, or END_BUILTINS if no such function exists.
1907 This is purely an operation on function codes; it does not guarantee
1908 that the target actually has an implementation of the function. */
1910 static built_in_function
1911 mathfn_built_in_2 (tree type, combined_fn fn)
1913 tree mtype;
1914 built_in_function fcode, fcodef, fcodel;
1915 built_in_function fcodef16 = END_BUILTINS;
1916 built_in_function fcodef32 = END_BUILTINS;
1917 built_in_function fcodef64 = END_BUILTINS;
1918 built_in_function fcodef128 = END_BUILTINS;
1919 built_in_function fcodef32x = END_BUILTINS;
1920 built_in_function fcodef64x = END_BUILTINS;
1921 built_in_function fcodef128x = END_BUILTINS;
1923 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1924 break the uses below. */
1925 #undef HUGE_VAL
1926 #undef NAN
1928 switch (fn)
1930 #define SEQ_OF_CASE_MATHFN \
1931 CASE_MATHFN_FLOATN (ACOS) \
1932 CASE_MATHFN_FLOATN (ACOSH) \
1933 CASE_MATHFN_FLOATN (ASIN) \
1934 CASE_MATHFN_FLOATN (ASINH) \
1935 CASE_MATHFN_FLOATN (ATAN) \
1936 CASE_MATHFN_FLOATN (ATAN2) \
1937 CASE_MATHFN_FLOATN (ATANH) \
1938 CASE_MATHFN_FLOATN (CBRT) \
1939 CASE_MATHFN_FLOATN (CEIL) \
1940 CASE_MATHFN (CEXPI) \
1941 CASE_MATHFN_FLOATN (COPYSIGN) \
1942 CASE_MATHFN_FLOATN (COS) \
1943 CASE_MATHFN_FLOATN (COSH) \
1944 CASE_MATHFN (DREM) \
1945 CASE_MATHFN_FLOATN (ERF) \
1946 CASE_MATHFN_FLOATN (ERFC) \
1947 CASE_MATHFN_FLOATN (EXP) \
1948 CASE_MATHFN (EXP10) \
1949 CASE_MATHFN_FLOATN (EXP2) \
1950 CASE_MATHFN_FLOATN (EXPM1) \
1951 CASE_MATHFN_FLOATN (FABS) \
1952 CASE_MATHFN_FLOATN (FDIM) \
1953 CASE_MATHFN_FLOATN (FLOOR) \
1954 CASE_MATHFN_FLOATN (FMA) \
1955 CASE_MATHFN_FLOATN (FMAX) \
1956 CASE_MATHFN_FLOATN (FMIN) \
1957 CASE_MATHFN_FLOATN (FMOD) \
1958 CASE_MATHFN_FLOATN (FREXP) \
1959 CASE_MATHFN (GAMMA) \
1960 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1961 CASE_MATHFN_FLOATN (HUGE_VAL) \
1962 CASE_MATHFN_FLOATN (HYPOT) \
1963 CASE_MATHFN_FLOATN (ILOGB) \
1964 CASE_MATHFN (ICEIL) \
1965 CASE_MATHFN (IFLOOR) \
1966 CASE_MATHFN_FLOATN (INF) \
1967 CASE_MATHFN (IRINT) \
1968 CASE_MATHFN (IROUND) \
1969 CASE_MATHFN (ISINF) \
1970 CASE_MATHFN (J0) \
1971 CASE_MATHFN (J1) \
1972 CASE_MATHFN (JN) \
1973 CASE_MATHFN (LCEIL) \
1974 CASE_MATHFN_FLOATN (LDEXP) \
1975 CASE_MATHFN (LFLOOR) \
1976 CASE_MATHFN_FLOATN (LGAMMA) \
1977 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1978 CASE_MATHFN (LLCEIL) \
1979 CASE_MATHFN (LLFLOOR) \
1980 CASE_MATHFN_FLOATN (LLRINT) \
1981 CASE_MATHFN_FLOATN (LLROUND) \
1982 CASE_MATHFN_FLOATN (LOG) \
1983 CASE_MATHFN_FLOATN (LOG10) \
1984 CASE_MATHFN_FLOATN (LOG1P) \
1985 CASE_MATHFN_FLOATN (LOG2) \
1986 CASE_MATHFN_FLOATN (LOGB) \
1987 CASE_MATHFN_FLOATN (LRINT) \
1988 CASE_MATHFN_FLOATN (LROUND) \
1989 CASE_MATHFN_FLOATN (MODF) \
1990 CASE_MATHFN_FLOATN (NAN) \
1991 CASE_MATHFN_FLOATN (NANS) \
1992 CASE_MATHFN_FLOATN (NEARBYINT) \
1993 CASE_MATHFN_FLOATN (NEXTAFTER) \
1994 CASE_MATHFN (NEXTTOWARD) \
1995 CASE_MATHFN_FLOATN (POW) \
1996 CASE_MATHFN (POWI) \
1997 CASE_MATHFN (POW10) \
1998 CASE_MATHFN_FLOATN (REMAINDER) \
1999 CASE_MATHFN_FLOATN (REMQUO) \
2000 CASE_MATHFN_FLOATN (RINT) \
2001 CASE_MATHFN_FLOATN (ROUND) \
2002 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2003 CASE_MATHFN (SCALB) \
2004 CASE_MATHFN_FLOATN (SCALBLN) \
2005 CASE_MATHFN_FLOATN (SCALBN) \
2006 CASE_MATHFN (SIGNBIT) \
2007 CASE_MATHFN (SIGNIFICAND) \
2008 CASE_MATHFN_FLOATN (SIN) \
2009 CASE_MATHFN (SINCOS) \
2010 CASE_MATHFN_FLOATN (SINH) \
2011 CASE_MATHFN_FLOATN (SQRT) \
2012 CASE_MATHFN_FLOATN (TAN) \
2013 CASE_MATHFN_FLOATN (TANH) \
2014 CASE_MATHFN_FLOATN (TGAMMA) \
2015 CASE_MATHFN_FLOATN (TRUNC) \
2016 CASE_MATHFN (Y0) \
2017 CASE_MATHFN (Y1) \
2018 CASE_MATHFN (YN)
2020 SEQ_OF_CASE_MATHFN
2022 default:
2023 return END_BUILTINS;
2026 mtype = TYPE_MAIN_VARIANT (type);
2027 if (mtype == double_type_node)
2028 return fcode;
2029 else if (mtype == float_type_node)
2030 return fcodef;
2031 else if (mtype == long_double_type_node)
2032 return fcodel;
2033 else if (mtype == float16_type_node)
2034 return fcodef16;
2035 else if (mtype == float32_type_node)
2036 return fcodef32;
2037 else if (mtype == float64_type_node)
2038 return fcodef64;
2039 else if (mtype == float128_type_node)
2040 return fcodef128;
2041 else if (mtype == float32x_type_node)
2042 return fcodef32x;
2043 else if (mtype == float64x_type_node)
2044 return fcodef64x;
2045 else if (mtype == float128x_type_node)
2046 return fcodef128x;
2047 else
2048 return END_BUILTINS;
2051 #undef CASE_MATHFN
2052 #undef CASE_MATHFN_FLOATN
2053 #undef CASE_MATHFN_REENT
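/* Illustrative example, not part of builtins.cc: with the table above,
   mathfn_built_in_2 (float_type_node, CFN_SQRT) yields BUILT_IN_SQRTF and
   mathfn_built_in_2 (long_double_type_node, CFN_SQRT) yields BUILT_IN_SQRTL,
   while a type with no matching suffixed variant yields END_BUILTINS.  */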
2055 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2056 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2057 otherwise use the explicit declaration. If we can't do the conversion,
2058 return null. */
2060 static tree
2061 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2063 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2064 if (fcode2 == END_BUILTINS)
2065 return NULL_TREE;
2067 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2068 return NULL_TREE;
2070 return builtin_decl_explicit (fcode2);
2073 /* Like mathfn_built_in_1, but always use the implicit array. */
2075 tree
2076 mathfn_built_in (tree type, combined_fn fn)
2078 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2081 /* Like mathfn_built_in_1, but always use the explicit array. */
2083 tree
2084 mathfn_built_in_explicit (tree type, combined_fn fn)
2086 return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2089 /* Like mathfn_built_in_1, but take a built_in_function and
2090 always use the implicit array. */
2092 tree
2093 mathfn_built_in (tree type, enum built_in_function fn)
2095 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2098 /* Return the type associated with a built in function, i.e., the one
2099 to be passed to mathfn_built_in to get the type-specific
2100 function. */
2102 tree
2103 mathfn_built_in_type (combined_fn fn)
2105 #define CASE_MATHFN(MATHFN) \
2106 case CFN_BUILT_IN_##MATHFN: \
2107 return double_type_node; \
2108 case CFN_BUILT_IN_##MATHFN##F: \
2109 return float_type_node; \
2110 case CFN_BUILT_IN_##MATHFN##L: \
2111 return long_double_type_node;
2113 #define CASE_MATHFN_FLOATN(MATHFN) \
2114 CASE_MATHFN(MATHFN) \
2115 case CFN_BUILT_IN_##MATHFN##F16: \
2116 return float16_type_node; \
2117 case CFN_BUILT_IN_##MATHFN##F32: \
2118 return float32_type_node; \
2119 case CFN_BUILT_IN_##MATHFN##F64: \
2120 return float64_type_node; \
2121 case CFN_BUILT_IN_##MATHFN##F128: \
2122 return float128_type_node; \
2123 case CFN_BUILT_IN_##MATHFN##F32X: \
2124 return float32x_type_node; \
2125 case CFN_BUILT_IN_##MATHFN##F64X: \
2126 return float64x_type_node; \
2127 case CFN_BUILT_IN_##MATHFN##F128X: \
2128 return float128x_type_node;
2130 /* Similar to above, but appends _R after any F/L suffix. */
2131 #define CASE_MATHFN_REENT(MATHFN) \
2132 case CFN_BUILT_IN_##MATHFN##_R: \
2133 return double_type_node; \
2134 case CFN_BUILT_IN_##MATHFN##F_R: \
2135 return float_type_node; \
2136 case CFN_BUILT_IN_##MATHFN##L_R: \
2137 return long_double_type_node;
2139 switch (fn)
2141 SEQ_OF_CASE_MATHFN
2143 default:
2144 return NULL_TREE;
2147 #undef CASE_MATHFN
2148 #undef CASE_MATHFN_FLOATN
2149 #undef CASE_MATHFN_REENT
2150 #undef SEQ_OF_CASE_MATHFN
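/* Illustrative example, not part of builtins.cc: mathfn_built_in_type maps a
   suffixed function code back to its operand type, e.g.

     mathfn_built_in_type (CFN_BUILT_IN_SINF)    -> float_type_node
     mathfn_built_in_type (CFN_BUILT_IN_SINL)    -> long_double_type_node
     mathfn_built_in_type (CFN_BUILT_IN_SINF128) -> float128_type_node

   and the result can be fed back to mathfn_built_in to pick a sibling decl.  */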
2153 /* Check whether there is an internal function associated with function FN
2154 and return type RETURN_TYPE. Return the function if so, otherwise return
2155 IFN_LAST.
2157 Note that this function only tests whether the function is defined in
2158 internals.def, not whether it is actually available on the target. */
2160 static internal_fn
2161 associated_internal_fn (built_in_function fn, tree return_type)
2163 switch (fn)
2165 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2166 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2167 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2168 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2169 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2170 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2171 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2172 #include "internal-fn.def"
2174 CASE_FLT_FN (BUILT_IN_POW10):
2175 return IFN_EXP10;
2177 CASE_FLT_FN (BUILT_IN_DREM):
2178 return IFN_REMAINDER;
2180 CASE_FLT_FN (BUILT_IN_SCALBN):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2183 return IFN_LDEXP;
2184 return IFN_LAST;
2186 default:
2187 return IFN_LAST;
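/* Illustrative example, not part of builtins.cc: with the mapping above,
   BUILT_IN_SQRTF maps to IFN_SQRT and BUILT_IN_POW10 maps to IFN_EXP10,
   while BUILT_IN_SCALBN maps to IFN_LDEXP only when the return type's
   format has radix 2, so not for decimal floating-point types.  */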
2191 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2192 return its code, otherwise return IFN_LAST. Note that this function
2193 only tests whether the function is defined in internals.def, not whether
2194 it is actually available on the target. */
2196 internal_fn
2197 associated_internal_fn (tree fndecl)
2199 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2200 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2201 TREE_TYPE (TREE_TYPE (fndecl)));
2204 /* Check whether there is an internal function associated with function CFN
2205 and return type RETURN_TYPE. Return the function if so, otherwise return
2206 IFN_LAST.
2208 Note that this function only tests whether the function is defined in
2209 internals.def, not whether it is actually available on the target. */
2211 internal_fn
2212 associated_internal_fn (combined_fn cfn, tree return_type)
2214 if (internal_fn_p (cfn))
2215 return as_internal_fn (cfn);
2216 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2219 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2220 on the current target by a call to an internal function, return the
2221 code of that internal function, otherwise return IFN_LAST. The caller
2222 is responsible for ensuring that any side-effects of the built-in
2223 call are dealt with correctly. E.g. if CALL sets errno, the caller
2224 must decide that the errno result isn't needed or make it available
2225 in some other way. */
2227 internal_fn
2228 replacement_internal_fn (gcall *call)
2230 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2232 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2233 if (ifn != IFN_LAST)
2235 tree_pair types = direct_internal_fn_types (ifn, call);
2236 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2237 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2238 return ifn;
2241 return IFN_LAST;
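/* Illustrative example, not part of builtins.cc: for a GIMPLE call such as
   _2 = sqrtf (x_1), replacement_internal_fn returns IFN_SQRT when the
   corresponding direct optab is supported for the basic block's optimization
   type, allowing the statement to be rewritten as _2 = .SQRT (x_1); the
   caller still has to cope with errno if it matters.  */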
2244 /* Expand a call to the builtin trinary math functions (fma).
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function; if convenient, the result should be placed in TARGET.
2248 SUBTARGET may be used as the target for computing one of EXP's
2249 operands. */
2251 static rtx
2252 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2254 optab builtin_optab;
2255 rtx op0, op1, op2, result;
2256 rtx_insn *insns;
2257 tree fndecl = get_callee_fndecl (exp);
2258 tree arg0, arg1, arg2;
2259 machine_mode mode;
2261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2264 arg0 = CALL_EXPR_ARG (exp, 0);
2265 arg1 = CALL_EXPR_ARG (exp, 1);
2266 arg2 = CALL_EXPR_ARG (exp, 2);
2268 switch (DECL_FUNCTION_CODE (fndecl))
2270 CASE_FLT_FN (BUILT_IN_FMA):
2271 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2272 builtin_optab = fma_optab; break;
2273 default:
2274 gcc_unreachable ();
2277 /* Make a suitable register to place result in. */
2278 mode = TYPE_MODE (TREE_TYPE (exp));
2280 /* Before working hard, check whether the instruction is available. */
2281 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2282 return NULL_RTX;
2284 result = gen_reg_rtx (mode);
2286 /* Always stabilize the argument list. */
2287 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2288 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2289 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2291 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2292 op1 = expand_normal (arg1);
2293 op2 = expand_normal (arg2);
2295 start_sequence ();
2297 /* Compute into RESULT.
2298 Set RESULT to wherever the result comes back. */
2299 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2300 result, 0);
2302 /* If we were unable to expand via the builtin, stop the sequence
2303 (without outputting the insns) and call to the library function
2304 with the stabilized argument list. */
2305 if (result == 0)
2307 end_sequence ();
2308 return expand_call (exp, target, target == const0_rtx);
2311 /* Output the entire sequence. */
2312 insns = get_insns ();
2313 end_sequence ();
2314 emit_insn (insns);
2316 return result;
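/* Illustrative note, not part of builtins.cc: on a target whose fma_optab
   has a handler for the result mode, a call such as

     double d = __builtin_fma (a, b, c);

   is expanded by the function above to a single fused multiply-add insn;
   otherwise the generated sequence is discarded and a normal library call
   to fma is emitted instead.  */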
2319 /* Expand a call to the builtin sin and cos math functions.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
2322 function; if convenient, the result should be placed in TARGET.
2323 SUBTARGET may be used as the target for computing one of EXP's
2324 operands. */
2326 static rtx
2327 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2329 optab builtin_optab;
2330 rtx op0;
2331 rtx_insn *insns;
2332 tree fndecl = get_callee_fndecl (exp);
2333 machine_mode mode;
2334 tree arg;
2336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 return NULL_RTX;
2339 arg = CALL_EXPR_ARG (exp, 0);
2341 switch (DECL_FUNCTION_CODE (fndecl))
2343 CASE_FLT_FN (BUILT_IN_SIN):
2344 CASE_FLT_FN (BUILT_IN_COS):
2345 builtin_optab = sincos_optab; break;
2346 default:
2347 gcc_unreachable ();
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (exp));
2353 /* Check if sincos insn is available, otherwise fallback
2354 to sin or cos insn. */
2355 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2356 switch (DECL_FUNCTION_CODE (fndecl))
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 builtin_optab = sin_optab; break;
2360 CASE_FLT_FN (BUILT_IN_COS):
2361 builtin_optab = cos_optab; break;
2362 default:
2363 gcc_unreachable ();
2366 /* Before working hard, check whether the instruction is available. */
2367 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2369 rtx result = gen_reg_rtx (mode);
2371 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2372 need to expand the argument again. This way, we will not perform
2373 side-effects more than once. */
2374 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2378 start_sequence ();
2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
2382 if (builtin_optab == sincos_optab)
2384 int ok;
2386 switch (DECL_FUNCTION_CODE (fndecl))
2388 CASE_FLT_FN (BUILT_IN_SIN):
2389 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2390 break;
2391 CASE_FLT_FN (BUILT_IN_COS):
2392 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2393 break;
2394 default:
2395 gcc_unreachable ();
2397 gcc_assert (ok);
2399 else
2400 result = expand_unop (mode, builtin_optab, op0, result, 0);
2402 if (result != 0)
2404 /* Output the entire sequence. */
2405 insns = get_insns ();
2406 end_sequence ();
2407 emit_insn (insns);
2408 return result;
2411 /* If we were unable to expand via the builtin, stop the sequence
2412 (without outputting the insns) and call to the library function
2413 with the stabilized argument list. */
2414 end_sequence ();
2417 return expand_call (exp, target, target == const0_rtx);
2420 /* Given an interclass math builtin decl FNDECL and its argument ARG
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available return CODE_FOR_nothing. */
2424 static enum insn_code
2425 interclass_mathfn_icode (tree arg, tree fndecl)
2427 bool errno_set = false;
2428 optab builtin_optab = unknown_optab;
2429 machine_mode mode;
2431 switch (DECL_FUNCTION_CODE (fndecl))
2433 CASE_FLT_FN (BUILT_IN_ILOGB):
2434 errno_set = true; builtin_optab = ilogb_optab; break;
2435 CASE_FLT_FN (BUILT_IN_ISINF):
2436 builtin_optab = isinf_optab; break;
2437 case BUILT_IN_ISNORMAL:
2438 case BUILT_IN_ISFINITE:
2439 CASE_FLT_FN (BUILT_IN_FINITE):
2440 case BUILT_IN_FINITED32:
2441 case BUILT_IN_FINITED64:
2442 case BUILT_IN_FINITED128:
2443 case BUILT_IN_ISINFD32:
2444 case BUILT_IN_ISINFD64:
2445 case BUILT_IN_ISINFD128:
2446 /* These builtins have no optabs (yet). */
2447 break;
2448 default:
2449 gcc_unreachable ();
2452 /* There's no easy way to detect the case we need to set EDOM. */
2453 if (flag_errno_math && errno_set)
2454 return CODE_FOR_nothing;
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2459 if (builtin_optab)
2460 return optab_handler (builtin_optab, mode);
2461 return CODE_FOR_nothing;
2464 /* Expand a call to one of the builtin math functions that operate on
2465 floating point argument and output an integer result (ilogb, isinf,
2466 isnan, etc).
2467 Return 0 if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
2469 function; if convenient, the result should be placed in TARGET. */
2471 static rtx
2472 expand_builtin_interclass_mathfn (tree exp, rtx target)
2474 enum insn_code icode = CODE_FOR_nothing;
2475 rtx op0;
2476 tree fndecl = get_callee_fndecl (exp);
2477 machine_mode mode;
2478 tree arg;
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 icode = interclass_mathfn_icode (arg, fndecl);
2485 mode = TYPE_MODE (TREE_TYPE (arg));
2487 if (icode != CODE_FOR_nothing)
2489 class expand_operand ops[1];
2490 rtx_insn *last = get_last_insn ();
2491 tree orig_arg = arg;
2493 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2494 need to expand the argument again. This way, we will not perform
2495 side-effects more than once. */
2496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2498 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2500 if (mode != GET_MODE (op0))
2501 op0 = convert_to_mode (mode, op0, 0);
2503 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2504 if (maybe_legitimize_operands (icode, 0, 1, ops)
2505 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2506 return ops[0].value;
2508 delete_insns_since (last);
2509 CALL_EXPR_ARG (exp, 0) = orig_arg;
2512 return NULL_RTX;
2515 /* Expand a call to the builtin sincos math function.
2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
2517 function in-line. EXP is the expression that is a call to the builtin
2518 function. */
2520 static rtx
2521 expand_builtin_sincos (tree exp)
2523 rtx op0, op1, op2, target1, target2;
2524 machine_mode mode;
2525 tree arg, sinp, cosp;
2526 int result;
2527 location_t loc = EXPR_LOCATION (exp);
2528 tree alias_type, alias_off;
2530 if (!validate_arglist (exp, REAL_TYPE,
2531 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2532 return NULL_RTX;
2534 arg = CALL_EXPR_ARG (exp, 0);
2535 sinp = CALL_EXPR_ARG (exp, 1);
2536 cosp = CALL_EXPR_ARG (exp, 2);
2538 /* Make a suitable register to place result in. */
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2541 /* Check if sincos insn is available, otherwise emit the call. */
2542 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2543 return NULL_RTX;
2545 target1 = gen_reg_rtx (mode);
2546 target2 = gen_reg_rtx (mode);
2548 op0 = expand_normal (arg);
2549 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2550 alias_off = build_int_cst (alias_type, 0);
2551 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 sinp, alias_off));
2553 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 cosp, alias_off));
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2559 gcc_assert (result);
2561 /* Move target1 and target2 to the memory locations indicated
2562 by op1 and op2. */
2563 emit_move_insn (op1, target1);
2564 emit_move_insn (op2, target2);
2566 return const0_rtx;
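/* Illustrative note, not part of builtins.cc: for

     sincos (x, &s, &c);

   on a target providing sincos_optab for the argument's mode, the expansion
   above computes both results with one pattern and stores them through the
   two pointer arguments; otherwise NULL_RTX is returned and the ordinary
   library call is emitted.  */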
2569 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2570 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2571 static rtx
2572 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2574 if (!validate_arglist (exp, VOID_TYPE))
2575 return NULL_RTX;
2577 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2578 if (icode == CODE_FOR_nothing)
2579 return NULL_RTX;
2581 if (target == 0
2582 || GET_MODE (target) != target_mode
2583 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2584 target = gen_reg_rtx (target_mode);
2586 rtx pat = GEN_FCN (icode) (target);
2587 if (!pat)
2588 return NULL_RTX;
2589 emit_insn (pat);
2591 return target;
2594 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2595 fenv.h), returning the result and setting it in TARGET. Otherwise return
2596 NULL_RTX on failure. */
2597 static rtx
2598 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2599 machine_mode target_mode, optab op_optab)
2601 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2602 return NULL_RTX;
2603 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2605 insn_code icode = direct_optab_handler (op_optab, SImode);
2606 if (icode == CODE_FOR_nothing)
2607 return NULL_RTX;
2609 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2610 return NULL_RTX;
2612 if (target == 0
2613 || GET_MODE (target) != target_mode
2614 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2615 target = gen_reg_rtx (target_mode);
2617 rtx pat = GEN_FCN (icode) (target, op0);
2618 if (!pat)
2619 return NULL_RTX;
2620 emit_insn (pat);
2622 return target;
2625 /* Expand a call to the internal cexpi builtin to the sincos math function.
2626 EXP is the expression that is a call to the builtin function; if convenient,
2627 the result should be placed in TARGET. */
2629 static rtx
2630 expand_builtin_cexpi (tree exp, rtx target)
2632 tree fndecl = get_callee_fndecl (exp);
2633 tree arg, type;
2634 machine_mode mode;
2635 rtx op0, op1, op2;
2636 location_t loc = EXPR_LOCATION (exp);
2638 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2639 return NULL_RTX;
2641 arg = CALL_EXPR_ARG (exp, 0);
2642 type = TREE_TYPE (arg);
2643 mode = TYPE_MODE (TREE_TYPE (arg));
2645 /* Try expanding via a sincos optab, fall back to emitting a libcall
2646 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2647 is only generated from sincos, cexp or if we have either of them. */
2648 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2650 op1 = gen_reg_rtx (mode);
2651 op2 = gen_reg_rtx (mode);
2653 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2655 /* Compute into op1 and op2. */
2656 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2658 else if (targetm.libc_has_function (function_sincos, type))
2660 tree call, fn = NULL_TREE;
2661 tree top1, top2;
2662 rtx op1a, op2a;
2664 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2665 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2666 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2667 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2668 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2669 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2670 else
2671 gcc_unreachable ();
2673 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2674 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2675 op1a = copy_addr_to_reg (XEXP (op1, 0));
2676 op2a = copy_addr_to_reg (XEXP (op2, 0));
2677 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2678 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2680 /* Make sure not to fold the sincos call again. */
2681 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2682 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2683 call, 3, arg, top1, top2));
2685 else
2687 tree call, fn = NULL_TREE, narg;
2688 tree ctype = build_complex_type (type);
2690 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2691 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2692 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2693 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2694 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2695 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2696 else
2697 gcc_unreachable ();
2699 /* If we don't have a decl for cexp create one. This is the
2700 friendliest fallback if the user calls __builtin_cexpi
2701 without full target C99 function support. */
2702 if (fn == NULL_TREE)
2704 tree fntype;
2705 const char *name = NULL;
2707 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2708 name = "cexpf";
2709 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2710 name = "cexp";
2711 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2712 name = "cexpl";
2714 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2715 fn = build_fn_decl (name, fntype);
2718 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2719 build_real (type, dconst0), arg);
2721 /* Make sure not to fold the cexp call again. */
2722 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2723 return expand_expr (build_call_nary (ctype, call, 1, narg),
2724 target, VOIDmode, EXPAND_NORMAL);
2727 /* Now build the proper return type. */
2728 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2729 make_tree (TREE_TYPE (arg), op2),
2730 make_tree (TREE_TYPE (arg), op1)),
2731 target, VOIDmode, EXPAND_NORMAL);
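/* Illustrative note, not part of builtins.cc: the internal cexpi builtin
   computes cexpi (x) = cos (x) + i * sin (x), so the fallbacks above are
   equivalent rewrites: either sincos (x, &s, &c) followed by constructing
   the complex result, or cexp (0.0 + x * I) when only cexp is available.  */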
2734 /* Conveniently construct a function call expression. FNDECL names the
2735 function to be called, N is the number of arguments, and the "..."
2736 parameters are the argument expressions. Unlike build_call_expr
2737 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2739 static tree
2740 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2742 va_list ap;
2743 tree fntype = TREE_TYPE (fndecl);
2744 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2746 va_start (ap, n);
2747 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2748 va_end (ap);
2749 SET_EXPR_LOCATION (fn, loc);
2750 return fn;
2753 /* Expand the __builtin_issignaling builtin. This needs to handle
2754 all floating point formats that do support NaNs (for those that
2755 don't it just sets target to 0). */
2757 static rtx
2758 expand_builtin_issignaling (tree exp, rtx target)
2760 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2761 return NULL_RTX;
2763 tree arg = CALL_EXPR_ARG (exp, 0);
2764 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2765 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2767 /* Expand the argument yielding an RTX expression. */
2768 rtx temp = expand_normal (arg);
2770 /* If mode doesn't support NaN, always return 0.
2771 Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2772 __builtin_issignaling working without -fsignaling-nans. Especially
2773 when -fno-signaling-nans is the default.
2774 On the other hand, MODE_HAS_NANS (fmode) is unnecessary: with
2775 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2776 fold to 0 or non-NaN/Inf classification. */
2777 if (!HONOR_NANS (fmode))
2779 emit_move_insn (target, const0_rtx);
2780 return target;
2783 /* Check if the back end provides an insn that handles issignaling for the
2784 argument's mode. */
2785 enum insn_code icode = optab_handler (issignaling_optab, fmode);
2786 if (icode != CODE_FOR_nothing)
2788 rtx_insn *last = get_last_insn ();
2789 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2790 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2791 return this_target;
2792 delete_insns_since (last);
2795 if (DECIMAL_FLOAT_MODE_P (fmode))
2797 scalar_int_mode imode;
2798 rtx hi;
2799 switch (fmt->ieee_bits)
2801 case 32:
2802 case 64:
2803 imode = int_mode_for_mode (fmode).require ();
2804 temp = gen_lowpart (imode, temp);
2805 break;
2806 case 128:
2807 imode = int_mode_for_size (64, 1).require ();
2808 hi = NULL_RTX;
2809 /* For decimal128, TImode support isn't always there and even when
2810 it is, working on the DImode high part is usually better. */
2811 if (!MEM_P (temp))
2813 if (rtx t = simplify_gen_subreg (imode, temp, fmode,
2814 subreg_highpart_offset (imode,
2815 fmode)))
2816 hi = t;
2817 else
2819 scalar_int_mode imode2;
2820 if (int_mode_for_mode (fmode).exists (&imode2))
2822 rtx temp2 = gen_lowpart (imode2, temp);
2823 poly_uint64 off = subreg_highpart_offset (imode, imode2);
2824 if (rtx t = simplify_gen_subreg (imode, temp2,
2825 imode2, off))
2826 hi = t;
2829 if (!hi)
2831 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2832 emit_move_insn (mem, temp);
2833 temp = mem;
2836 if (!hi)
2838 poly_int64 offset
2839 = subreg_highpart_offset (imode, GET_MODE (temp));
2840 hi = adjust_address (temp, imode, offset);
2842 temp = hi;
2843 break;
2844 default:
2845 gcc_unreachable ();
2847 /* In all of decimal{32,64,128}, there is an MSB sign bit and sNaNs
2848 have the 6 bits below it all set. */
2849 rtx val
2850 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2851 temp = expand_binop (imode, and_optab, temp, val,
2852 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2853 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2854 return temp;
2857 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2858 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2859 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2860 gcc_assert (MODE_COMPOSITE_P (fmode)
2861 || (fmt->pnan == fmt->p
2862 && fmt->signbit_ro == fmt->signbit_rw));
2864 switch (fmt->p)
2866 case 106: /* IBM double double */
2867 /* For IBM double double, recurse on the most significant double. */
2868 gcc_assert (MODE_COMPOSITE_P (fmode));
2869 temp = convert_modes (DFmode, fmode, temp, 0);
2870 fmode = DFmode;
2871 fmt = REAL_MODE_FORMAT (DFmode);
2872 /* FALLTHRU */
2873 case 8: /* bfloat */
2874 case 11: /* IEEE half */
2875 case 24: /* IEEE single */
2876 case 53: /* IEEE double or Intel extended with rounding to double */
2877 if (fmt->p == 53 && fmt->signbit_ro == 79)
2878 goto extended;
2880 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2881 temp = gen_lowpart (imode, temp);
2882 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2883 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2884 if (fmt->qnan_msb_set)
2886 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2887 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2888 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2889 ((temp ^ bit) & mask) > val. */
2890 temp = expand_binop (imode, xor_optab, temp, bit,
2891 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2892 temp = expand_binop (imode, and_optab, temp, mask,
2893 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2894 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2895 1, 1);
2897 else
2899 /* For MIPS/PA IEEE single/double, expand to:
2900 (temp & val) == val. */
2901 temp = expand_binop (imode, and_optab, temp, val,
2902 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2903 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2904 1, 1);
2907 break;
2908 case 113: /* IEEE quad */
2910 rtx hi = NULL_RTX, lo = NULL_RTX;
2911 scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2912 /* For IEEE quad, TImode support isn't always there and even when
2913 it is, working on DImode parts is usually better. */
2914 if (!MEM_P (temp))
2916 hi = simplify_gen_subreg (imode, temp, fmode,
2917 subreg_highpart_offset (imode, fmode));
2918 lo = simplify_gen_subreg (imode, temp, fmode,
2919 subreg_lowpart_offset (imode, fmode));
2920 if (!hi || !lo)
2922 scalar_int_mode imode2;
2923 if (int_mode_for_mode (fmode).exists (&imode2))
2925 rtx temp2 = gen_lowpart (imode2, temp);
2926 hi = simplify_gen_subreg (imode, temp2, imode2,
2927 subreg_highpart_offset (imode,
2928 imode2));
2929 lo = simplify_gen_subreg (imode, temp2, imode2,
2930 subreg_lowpart_offset (imode,
2931 imode2));
2934 if (!hi || !lo)
2936 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2937 emit_move_insn (mem, temp);
2938 temp = mem;
2941 if (!hi || !lo)
2943 poly_int64 offset
2944 = subreg_highpart_offset (imode, GET_MODE (temp));
2945 hi = adjust_address (temp, imode, offset);
2946 offset = subreg_lowpart_offset (imode, GET_MODE (temp));
2947 lo = adjust_address (temp, imode, offset);
2949 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
2950 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
2951 if (fmt->qnan_msb_set)
2953 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
2954 - 64)));
2955 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
2956 /* For non-MIPS/PA IEEE quad, expand to:
2957 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
2958 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
2959 lo = expand_binop (imode, ior_optab, lo, nlo,
2960 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2961 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
2962 temp = expand_binop (imode, xor_optab, hi, bit,
2963 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2964 temp = expand_binop (imode, ior_optab, temp, lo,
2965 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2966 temp = expand_binop (imode, and_optab, temp, mask,
2967 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2968 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2969 1, 1);
2971 else
2973 /* For MIPS/PA IEEE quad, expand to:
2974 (hi & val) == val. */
2975 temp = expand_binop (imode, and_optab, hi, val,
2976 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2977 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2978 1, 1);
2981 break;
2982 case 64: /* Intel or Motorola extended */
2983 extended:
2985 rtx ex, hi, lo;
2986 scalar_int_mode imode = int_mode_for_size (32, 1).require ();
2987 scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
2988 if (!MEM_P (temp))
2990 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2991 emit_move_insn (mem, temp);
2992 temp = mem;
2994 if (fmt->signbit_ro == 95)
2996 /* Motorola, always big endian, with 16-bit gap in between
2997 16-bit sign+exponent and 64-bit mantissa. */
2998 ex = adjust_address (temp, iemode, 0);
2999 hi = adjust_address (temp, imode, 4);
3000 lo = adjust_address (temp, imode, 8);
3002 else if (!WORDS_BIG_ENDIAN)
3004 /* Intel little endian, 64-bit mantissa followed by 16-bit
3005 sign+exponent and then either 16 or 48 bits of gap. */
3006 ex = adjust_address (temp, iemode, 8);
3007 hi = adjust_address (temp, imode, 4);
3008 lo = adjust_address (temp, imode, 0);
3010 else
3012 /* Big endian Itanium. */
3013 ex = adjust_address (temp, iemode, 0);
3014 hi = adjust_address (temp, imode, 2);
3015 lo = adjust_address (temp, imode, 6);
3017 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3018 gcc_assert (fmt->qnan_msb_set);
3019 rtx mask = GEN_INT (0x7fff);
3020 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3021 /* For Intel/Motorola extended format, expand to:
3022 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3023 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3024 lo = expand_binop (imode, ior_optab, lo, nlo,
3025 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3026 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3027 temp = expand_binop (imode, xor_optab, hi, bit,
3028 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3029 temp = expand_binop (imode, ior_optab, temp, lo,
3030 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3031 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3032 ex = expand_binop (iemode, and_optab, ex, mask,
3033 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3034 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3035 ex, mask, iemode, 1, 1);
3036 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3037 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3039 break;
3040 default:
3041 gcc_unreachable ();
3044 return temp;
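/* Worked example, not part of builtins.cc, assuming IEEE single precision
   (fmt->p == 24, fmt->signbit_ro == 31, qnan_msb_set):

     bit  = 0x00400000   quiet-NaN bit, 1 << (p - 2)
     mask = 0x7fffffff   everything except the sign bit
     val  = 0x7fc00000   all-ones exponent plus the quiet bit

   so ((temp ^ bit) & mask) > val holds exactly for encodings whose exponent
   is all ones, whose quiet bit is clear and whose mantissa is non-zero:
   the sNaN 0x7fa00000 passes, while the qNaN 0x7fc00000 and infinity
   0x7f800000 do not.  */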
3047 /* Expand a call to one of the builtin rounding functions gcc defines
3048 as an extension (lfloor and lceil). As these are gcc extensions we
3049 do not need to worry about setting errno to EDOM.
3050 If expanding via optab fails, lower expression to (int)(floor(x)).
3051 EXP is the expression that is a call to the builtin function;
3052 if convenient, the result should be placed in TARGET. */
3054 static rtx
3055 expand_builtin_int_roundingfn (tree exp, rtx target)
3057 convert_optab builtin_optab;
3058 rtx op0, tmp;
3059 rtx_insn *insns;
3060 tree fndecl = get_callee_fndecl (exp);
3061 enum built_in_function fallback_fn;
3062 tree fallback_fndecl;
3063 machine_mode mode;
3064 tree arg;
3066 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3067 return NULL_RTX;
3069 arg = CALL_EXPR_ARG (exp, 0);
3071 switch (DECL_FUNCTION_CODE (fndecl))
3073 CASE_FLT_FN (BUILT_IN_ICEIL):
3074 CASE_FLT_FN (BUILT_IN_LCEIL):
3075 CASE_FLT_FN (BUILT_IN_LLCEIL):
3076 builtin_optab = lceil_optab;
3077 fallback_fn = BUILT_IN_CEIL;
3078 break;
3080 CASE_FLT_FN (BUILT_IN_IFLOOR):
3081 CASE_FLT_FN (BUILT_IN_LFLOOR):
3082 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3083 builtin_optab = lfloor_optab;
3084 fallback_fn = BUILT_IN_FLOOR;
3085 break;
3087 default:
3088 gcc_unreachable ();
3091 /* Make a suitable register to place result in. */
3092 mode = TYPE_MODE (TREE_TYPE (exp));
3094 target = gen_reg_rtx (mode);
3096 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3097 need to expand the argument again. This way, we will not perform
3098 side-effects more than once. */
3099 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3101 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3103 start_sequence ();
3105 /* Compute into TARGET. */
3106 if (expand_sfix_optab (target, op0, builtin_optab))
3108 /* Output the entire sequence. */
3109 insns = get_insns ();
3110 end_sequence ();
3111 emit_insn (insns);
3112 return target;
3115 /* If we were unable to expand via the builtin, stop the sequence
3116 (without outputting the insns). */
3117 end_sequence ();
3119 /* Fall back to floating point rounding optab. */
3120 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3122 /* For non-C99 targets we may end up without a fallback fndecl here
3123 if the user called __builtin_lfloor directly. In this case emit
3124 a call to the floor/ceil variants nevertheless. This should give
3125 the best user experience for targets without full C99 support. */
3126 if (fallback_fndecl == NULL_TREE)
3128 tree fntype;
3129 const char *name = NULL;
3131 switch (DECL_FUNCTION_CODE (fndecl))
3133 case BUILT_IN_ICEIL:
3134 case BUILT_IN_LCEIL:
3135 case BUILT_IN_LLCEIL:
3136 name = "ceil";
3137 break;
3138 case BUILT_IN_ICEILF:
3139 case BUILT_IN_LCEILF:
3140 case BUILT_IN_LLCEILF:
3141 name = "ceilf";
3142 break;
3143 case BUILT_IN_ICEILL:
3144 case BUILT_IN_LCEILL:
3145 case BUILT_IN_LLCEILL:
3146 name = "ceill";
3147 break;
3148 case BUILT_IN_IFLOOR:
3149 case BUILT_IN_LFLOOR:
3150 case BUILT_IN_LLFLOOR:
3151 name = "floor";
3152 break;
3153 case BUILT_IN_IFLOORF:
3154 case BUILT_IN_LFLOORF:
3155 case BUILT_IN_LLFLOORF:
3156 name = "floorf";
3157 break;
3158 case BUILT_IN_IFLOORL:
3159 case BUILT_IN_LFLOORL:
3160 case BUILT_IN_LLFLOORL:
3161 name = "floorl";
3162 break;
3163 default:
3164 gcc_unreachable ();
3167 fntype = build_function_type_list (TREE_TYPE (arg),
3168 TREE_TYPE (arg), NULL_TREE);
3169 fallback_fndecl = build_fn_decl (name, fntype);
3172 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3174 tmp = expand_normal (exp);
3175 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3177 /* Truncate the result of floating point optab to integer
3178 via expand_fix (). */
3179 target = gen_reg_rtx (mode);
3180 expand_fix (target, tmp, 0);
3182 return target;
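/* Illustrative note, not part of builtins.cc: when lceil_optab/lfloor_optab
   have no pattern for the modes involved, the code above lowers e.g.

     long l = __builtin_lfloor (x);

   into a call to floor followed by expand_fix, effectively l = (long) floor (x),
   which is fine because these GCC extensions do not promise to set errno.  */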
3185 /* Expand a call to one of the builtin math functions doing integer
3186 conversion (lrint).
3187 Return 0 if a normal call should be emitted rather than expanding the
3188 function in-line. EXP is the expression that is a call to the builtin
3189 function; if convenient, the result should be placed in TARGET. */
3191 static rtx
3192 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3194 convert_optab builtin_optab;
3195 rtx op0;
3196 rtx_insn *insns;
3197 tree fndecl = get_callee_fndecl (exp);
3198 tree arg;
3199 machine_mode mode;
3200 enum built_in_function fallback_fn = BUILT_IN_NONE;
3202 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3203 return NULL_RTX;
3205 arg = CALL_EXPR_ARG (exp, 0);
3207 switch (DECL_FUNCTION_CODE (fndecl))
3209 CASE_FLT_FN (BUILT_IN_IRINT):
3210 fallback_fn = BUILT_IN_LRINT;
3211 gcc_fallthrough ();
3212 CASE_FLT_FN (BUILT_IN_LRINT):
3213 CASE_FLT_FN (BUILT_IN_LLRINT):
3214 builtin_optab = lrint_optab;
3215 break;
3217 CASE_FLT_FN (BUILT_IN_IROUND):
3218 fallback_fn = BUILT_IN_LROUND;
3219 gcc_fallthrough ();
3220 CASE_FLT_FN (BUILT_IN_LROUND):
3221 CASE_FLT_FN (BUILT_IN_LLROUND):
3222 builtin_optab = lround_optab;
3223 break;
3225 default:
3226 gcc_unreachable ();
3229 /* There's no easy way to detect the case we need to set EDOM. */
3230 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3231 return NULL_RTX;
3233 /* Make a suitable register to place result in. */
3234 mode = TYPE_MODE (TREE_TYPE (exp));
3236 /* There's no easy way to detect the case we need to set EDOM. */
3237 if (!flag_errno_math)
3239 rtx result = gen_reg_rtx (mode);
3241 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3242 need to expand the argument again. This way, we will not perform
3243 side-effects more than once. */
3244 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3246 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3248 start_sequence ();
3250 if (expand_sfix_optab (result, op0, builtin_optab))
3252 /* Output the entire sequence. */
3253 insns = get_insns ();
3254 end_sequence ();
3255 emit_insn (insns);
3256 return result;
3259 /* If we were unable to expand via the builtin, stop the sequence
3260 (without outputting the insns) and call to the library function
3261 with the stabilized argument list. */
3262 end_sequence ();
3265 if (fallback_fn != BUILT_IN_NONE)
3267 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3268 targets, (int) round (x) should never be transformed into
3269 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3270 a call to lround in the hope that the target provides at least some
3271 C99 functions. This should give the best user experience for
3272 targets without full C99 support.
3273 As scalar float conversions with the same mode are useless in GIMPLE,
3274 we can end up e.g. with a _Float32 argument passed to a float builtin,
3275 so try to get the type from the builtin prototype first. */
3276 tree fallback_fndecl = NULL_TREE;
3277 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3278 fallback_fndecl
3279 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3280 as_combined_fn (fallback_fn), 0);
3281 if (fallback_fndecl == NULL_TREE)
3282 fallback_fndecl
3283 = mathfn_built_in_1 (TREE_TYPE (arg),
3284 as_combined_fn (fallback_fn), 0);
3285 if (fallback_fndecl)
3287 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3288 fallback_fndecl, 1, arg);
3290 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3291 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3292 return convert_to_mode (mode, target, 0);
3296 return expand_call (exp, target, target == const0_rtx);
3299 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3300 a normal call should be emitted rather than expanding the function
3301 in-line. EXP is the expression that is a call to the builtin
3302 function; if convenient, the result should be placed in TARGET. */
3304 static rtx
3305 expand_builtin_powi (tree exp, rtx target)
3307 tree arg0, arg1;
3308 rtx op0, op1;
3309 machine_mode mode;
3310 machine_mode mode2;
3312 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3313 return NULL_RTX;
3315 arg0 = CALL_EXPR_ARG (exp, 0);
3316 arg1 = CALL_EXPR_ARG (exp, 1);
3317 mode = TYPE_MODE (TREE_TYPE (exp));
3319 /* Emit a libcall to libgcc. */
3321 /* Mode of the 2nd argument must match that of an int. */
3322 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3324 if (target == NULL_RTX)
3325 target = gen_reg_rtx (mode);
3327 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3328 if (GET_MODE (op0) != mode)
3329 op0 = convert_to_mode (mode, op0, 0);
3330 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3331 if (GET_MODE (op1) != mode2)
3332 op1 = convert_to_mode (mode2, op1, 0);
3334 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3335 target, LCT_CONST, mode,
3336 op0, mode, op1, mode2);
3338 return target;
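/* Illustrative note, not part of builtins.cc: __builtin_powi is always
   expanded here as a libcall through powi_optab, e.g. for double

     double d = __builtin_powi (x, n);

   becomes a call to libgcc's __powidf2 with the exponent converted to the
   mode of int; small constant exponents are normally turned into multiply
   sequences earlier, in the GIMPLE passes.  */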
3341 /* Expand expression EXP which is a call to the strlen builtin. Return
3342 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3343 try to get the result in TARGET, if convenient. */
3345 static rtx
3346 expand_builtin_strlen (tree exp, rtx target,
3347 machine_mode target_mode)
3349 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3350 return NULL_RTX;
3352 tree src = CALL_EXPR_ARG (exp, 0);
3354 /* If the length can be computed at compile-time, return it. */
3355 if (tree len = c_strlen (src, 0))
3356 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3358 /* If the length can be computed at compile-time and is a constant
3359 integer, but there are side-effects in src, evaluate
3360 src for side-effects, then return len.
3361 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3362 can be optimized into: i++; x = 3; */
3363 tree len = c_strlen (src, 1);
3364 if (len && TREE_CODE (len) == INTEGER_CST)
3366 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3367 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3370 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3372 /* If SRC is not a pointer type, don't do this operation inline. */
3373 if (align == 0)
3374 return NULL_RTX;
3376 /* Bail out if we can't compute strlen in the right mode. */
3377 machine_mode insn_mode;
3378 enum insn_code icode = CODE_FOR_nothing;
3379 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3381 icode = optab_handler (strlen_optab, insn_mode);
3382 if (icode != CODE_FOR_nothing)
3383 break;
3385 if (insn_mode == VOIDmode)
3386 return NULL_RTX;
3388 /* Make a place to hold the source address. We will not expand
3389 the actual source until we are sure that the expansion will
3390 not fail -- there are trees that cannot be expanded twice. */
3391 rtx src_reg = gen_reg_rtx (Pmode);
3393 /* Mark the beginning of the strlen sequence so we can emit the
3394 source operand later. */
3395 rtx_insn *before_strlen = get_last_insn ();
3397 class expand_operand ops[4];
3398 create_output_operand (&ops[0], target, insn_mode);
3399 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3400 create_integer_operand (&ops[2], 0);
3401 create_integer_operand (&ops[3], align);
3402 if (!maybe_expand_insn (icode, 4, ops))
3403 return NULL_RTX;
3405 /* Check to see if the argument was declared attribute nonstring
3406 and if so, issue a warning since at this point it's not known
3407 to be nul-terminated. */
3408 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3410 /* Now that we are assured of success, expand the source. */
3411 start_sequence ();
3412 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3413 if (pat != src_reg)
3415 #ifdef POINTERS_EXTEND_UNSIGNED
3416 if (GET_MODE (pat) != Pmode)
3417 pat = convert_to_mode (Pmode, pat,
3418 POINTERS_EXTEND_UNSIGNED);
3419 #endif
3420 emit_move_insn (src_reg, pat);
3422 pat = get_insns ();
3423 end_sequence ();
3425 if (before_strlen)
3426 emit_insn_after (pat, before_strlen);
3427 else
3428 emit_insn_before (pat, get_insns ());
3430 /* Return the value in the proper mode for this function. */
3431 if (GET_MODE (ops[0].value) == target_mode)
3432 target = ops[0].value;
3433 else if (target != 0)
3434 convert_move (target, ops[0].value, 0);
3435 else
3436 target = convert_to_mode (target_mode, ops[0].value, 0);
3438 return target;
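/* Rough summary of the two paths above (an illustrative sketch, not part
   of this file), assuming a target that provides a strlen<mode> insn:

     size_t a = strlen ("hello");   // constant: c_strlen yields 5 and the
                                    //   call expands to that constant
     size_t b = strlen (p);         // non-constant: expanded via the strlen
                                    //   optab with operands (result, MEM at P,
                                    //   0, known alignment of P)

   If neither path applies, NULL_RTX is returned and a normal library
   call to strlen is emitted instead.  */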
3441 /* Expand call EXP to the strnlen built-in, returning the result
3442 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3444 static rtx
3445 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3447 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3448 return NULL_RTX;
3450 tree src = CALL_EXPR_ARG (exp, 0);
3451 tree bound = CALL_EXPR_ARG (exp, 1);
3453 if (!bound)
3454 return NULL_RTX;
3456 location_t loc = UNKNOWN_LOCATION;
3457 if (EXPR_HAS_LOCATION (exp))
3458 loc = EXPR_LOCATION (exp);
3460 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3461 so these conversions aren't necessary. */
3462 c_strlen_data lendata = { };
3463 tree len = c_strlen (src, 0, &lendata, 1);
3464 if (len)
3465 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3467 if (TREE_CODE (bound) == INTEGER_CST)
3469 if (!len)
3470 return NULL_RTX;
3472 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3473 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3476 if (TREE_CODE (bound) != SSA_NAME)
3477 return NULL_RTX;
3479 wide_int min, max;
3480 value_range r;
3481 get_global_range_query ()->range_of_expr (r, bound);
3482 if (r.varying_p () || r.undefined_p ())
3483 return NULL_RTX;
3484 min = r.lower_bound ();
3485 max = r.upper_bound ();
3487 if (!len || TREE_CODE (len) != INTEGER_CST)
3489 bool exact;
3490 lendata.decl = unterminated_array (src, &len, &exact);
3491 if (!lendata.decl)
3492 return NULL_RTX;
3495 if (lendata.decl)
3496 return NULL_RTX;
3498 if (wi::gtu_p (min, wi::to_wide (len)))
3499 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3501 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3502 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3505 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3506 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3507 a target constant. */
3509 static rtx
3510 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3511 fixed_size_mode mode)
3513 /* The REPresentation pointed to by DATA need not be a nul-terminated
3514 string but the caller guarantees it's large enough for MODE. */
3515 const char *rep = (const char *) data;
3517 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
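/* Rough illustration of the store_by_pieces callback contract assumed
   above (a sketch, not part of this file): for DATA pointing at the bytes
   "ABCDEFGH", a call with OFFSET 4 and a 4-byte MODE returns an RTX
   constant holding the bytes 'E' 'F' 'G' 'H' in target byte order,
   conceptually:

     const char *rep = "ABCDEFGH";
     uint32_t piece;
     memcpy (&piece, rep + 4, sizeof piece);   // what c_readstr models

   store_by_pieces then emits one store of that constant instead of a
   load/store pair reading from the source.  */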
3520 /* LEN specifies the length of the block for the memcpy/memset operation.
3521 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3522 In some cases we can make a very likely guess on the max size, then we
3523 set it in PROBABLE_MAX_SIZE. */
3525 static void
3526 determine_block_size (tree len, rtx len_rtx,
3527 unsigned HOST_WIDE_INT *min_size,
3528 unsigned HOST_WIDE_INT *max_size,
3529 unsigned HOST_WIDE_INT *probable_max_size)
3531 if (CONST_INT_P (len_rtx))
3533 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3534 return;
3536 else
3538 wide_int min, max;
3539 enum value_range_kind range_type = VR_UNDEFINED;
3541 /* Determine bounds from the type. */
3542 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3543 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3544 else
3545 *min_size = 0;
3546 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3547 *probable_max_size = *max_size
3548 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3549 else
3550 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3552 if (TREE_CODE (len) == SSA_NAME)
3554 value_range r;
3555 tree tmin, tmax;
3556 get_global_range_query ()->range_of_expr (r, len);
3557 range_type = get_legacy_range (r, tmin, tmax);
3558 if (range_type != VR_UNDEFINED)
3560 min = wi::to_wide (tmin);
3561 max = wi::to_wide (tmax);
3564 if (range_type == VR_RANGE)
3566 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3567 *min_size = min.to_uhwi ();
3568 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3569 *probable_max_size = *max_size = max.to_uhwi ();
3571 else if (range_type == VR_ANTI_RANGE)
3573 /* Code like
3575 int n;
3576 if (n < 100)
3577 memcpy (a, b, n)
3579 produces an anti-range allowing negative values of N. We can still
3580 use the information and make a guess that N is not negative.
3582 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3583 *probable_max_size = min.to_uhwi () - 1;
3586 gcc_checking_assert (*max_size <=
3587 (unsigned HOST_WIDE_INT)
3588 GET_MODE_MASK (GET_MODE (len_rtx)));
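/* Worked example (illustrative only, not part of this file): for

     void f (unsigned int n, char *a, const char *b)
     {
       if (n < 100)
         memcpy (a, b, n);
     }

   assuming the range [0, 99] has been recorded on the SSA name for N,
   the code above yields *MIN_SIZE == 0 and *MAX_SIZE ==
   *PROBABLE_MAX_SIZE == 99.  With a signed N and the same guard, the
   conversion to size_t can instead yield an anti-range that still allows
   the values corresponding to negative N; the VR_ANTI_RANGE case then
   leaves *MAX_SIZE at the mode mask but lowers *PROBABLE_MAX_SIZE (to 99
   here) on the guess that N is not negative.  */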
3591 /* Expand a call EXP to the memcpy builtin.
3592 Return NULL_RTX if we failed; the caller should emit a normal call,
3593 otherwise try to get the result in TARGET, if convenient (and in
3594 mode MODE if that's convenient). */
3596 static rtx
3597 expand_builtin_memcpy (tree exp, rtx target)
3599 if (!validate_arglist (exp,
3600 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3601 return NULL_RTX;
3603 tree dest = CALL_EXPR_ARG (exp, 0);
3604 tree src = CALL_EXPR_ARG (exp, 1);
3605 tree len = CALL_EXPR_ARG (exp, 2);
3607 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3608 /*retmode=*/ RETURN_BEGIN, false);
3611 /* Expand a call EXP to the memmove builtin. Return NULL_RTX if we failed;
3612 the caller should emit a normal call. */
3614 static rtx
3615 expand_builtin_memmove (tree exp, rtx target)
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3621 tree dest = CALL_EXPR_ARG (exp, 0);
3622 tree src = CALL_EXPR_ARG (exp, 1);
3623 tree len = CALL_EXPR_ARG (exp, 2);
3625 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3626 /*retmode=*/ RETURN_BEGIN, true);
3629 /* Expand a call EXP to the mempcpy builtin.
3630 Return NULL_RTX if we failed; the caller should emit a normal call,
3631 otherwise try to get the result in TARGET, if convenient (and in
3632 mode MODE if that's convenient). */
3634 static rtx
3635 expand_builtin_mempcpy (tree exp, rtx target)
3637 if (!validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3641 tree dest = CALL_EXPR_ARG (exp, 0);
3642 tree src = CALL_EXPR_ARG (exp, 1);
3643 tree len = CALL_EXPR_ARG (exp, 2);
3645 /* Policy does not generally allow using compute_objsize (which
3646 is used internally by check_memop_size) to change code generation
3647 or drive optimization decisions.
3649 In this instance it is safe because the code we generate has
3650 the same semantics regardless of the return value of
3651 check_memop_sizes. Exactly the same amount of data is copied
3652 and the return value is exactly the same in both cases.
3654 Furthermore, check_memop_size always uses mode 0 for the call to
3655 compute_objsize, so the imprecise nature of compute_objsize is
3656 avoided. */
3658 /* Avoid expanding mempcpy into memcpy when the call is determined
3659 to overflow the buffer. This also prevents the same overflow
3660 from being diagnosed again when expanding memcpy. */
3662 return expand_builtin_mempcpy_args (dest, src, len,
3663 target, exp, /*retmode=*/ RETURN_END);
3666 /* Helper function to do the actual work for expansion of the memory copy
3667 family of functions (memcpy, mempcpy, stpcpy). The expansion should assign
3668 LEN bytes of memory from SRC to DEST and assign to TARGET if convenient.
3669 The return value is based on the RETMODE argument. */
3671 static rtx
3672 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3673 rtx target, tree exp, memop_ret retmode,
3674 bool might_overlap)
3676 unsigned int src_align = get_pointer_alignment (src);
3677 unsigned int dest_align = get_pointer_alignment (dest);
3678 rtx dest_mem, src_mem, dest_addr, len_rtx;
3679 HOST_WIDE_INT expected_size = -1;
3680 unsigned int expected_align = 0;
3681 unsigned HOST_WIDE_INT min_size;
3682 unsigned HOST_WIDE_INT max_size;
3683 unsigned HOST_WIDE_INT probable_max_size;
3685 bool is_move_done;
3687 /* If DEST is not a pointer type, call the normal function. */
3688 if (dest_align == 0)
3689 return NULL_RTX;
3691 /* If SRC is not a pointer type, don't do this
3692 operation in-line. */
3693 if (src_align == 0)
3694 return NULL_RTX;
3696 if (currently_expanding_gimple_stmt)
3697 stringop_block_profile (currently_expanding_gimple_stmt,
3698 &expected_align, &expected_size);
3700 if (expected_align < dest_align)
3701 expected_align = dest_align;
3702 dest_mem = get_memory_rtx (dest, len);
3703 set_mem_align (dest_mem, dest_align);
3704 len_rtx = expand_normal (len);
3705 determine_block_size (len, len_rtx, &min_size, &max_size,
3706 &probable_max_size);
3708 /* Try to get the byte representation of the constant SRC points to,
3709 with its byte size in NBYTES. */
3710 unsigned HOST_WIDE_INT nbytes;
3711 const char *rep = getbyterep (src, &nbytes);
3713 /* If the function's constant bound LEN_RTX is less than or equal
3714 to the byte size of the representation of the constant argument,
3715 and if block move would be done by pieces, we can avoid loading
3716 the bytes from memory and only store the computed constant.
3717 This works in the overlap (memmove) case as well because
3718 store_by_pieces just generates a series of stores of constants
3719 from the representation returned by getbyterep(). */
3720 if (rep
3721 && CONST_INT_P (len_rtx)
3722 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3723 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3724 CONST_CAST (char *, rep),
3725 dest_align, false))
3727 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3728 builtin_memcpy_read_str,
3729 CONST_CAST (char *, rep),
3730 dest_align, false, retmode);
3731 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3732 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3733 return dest_mem;
3736 src_mem = get_memory_rtx (src, len);
3737 set_mem_align (src_mem, src_align);
3739 /* Copy word part most expediently. */
3740 enum block_op_methods method = BLOCK_OP_NORMAL;
3741 if (CALL_EXPR_TAILCALL (exp)
3742 && (retmode == RETURN_BEGIN || target == const0_rtx))
3743 method = BLOCK_OP_TAILCALL;
3744 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3745 && retmode == RETURN_END
3746 && !might_overlap
3747 && target != const0_rtx);
3748 if (use_mempcpy_call)
3749 method = BLOCK_OP_NO_LIBCALL_RET;
3750 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3751 expected_align, expected_size,
3752 min_size, max_size, probable_max_size,
3753 use_mempcpy_call, &is_move_done,
3754 might_overlap);
3756 /* Bail out when a mempcpy call would be expanded as libcall and when
3757 we have a target that provides a fast implementation
3758 of mempcpy routine. */
3759 if (!is_move_done)
3760 return NULL_RTX;
3762 if (dest_addr == pc_rtx)
3763 return NULL_RTX;
3765 if (dest_addr == 0)
3767 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3768 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3771 if (retmode != RETURN_BEGIN && target != const0_rtx)
3773 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3774 /* stpcpy returns a pointer to the last byte, so subtract one. */
3775 if (retmode == RETURN_END_MINUS_ONE)
3776 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3779 return dest_addr;
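/* Source-level view of the RETMODE adjustment above (an illustrative
   sketch, not part of this file):

     void *a = memcpy  (dst, src, n);   // RETURN_BEGIN: a == dst
     void *b = mempcpy (dst, src, n);   // RETURN_END: b == (char *) dst + n
     char *c = stpcpy  (dst, src);      // RETURN_END_MINUS_ONE, with
                                        //   n == strlen (src) + 1, so
                                        //   c == dst + n - 1, the nul

   which is why DEST_ADDR has LEN_RTX added for RETURN_END and one byte
   subtracted again for RETURN_END_MINUS_ONE.  */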
3782 static rtx
3783 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3784 rtx target, tree orig_exp, memop_ret retmode)
3786 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3787 retmode, false);
3790 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3791 if we failed; the caller should emit a normal call, otherwise try to
3792 get the result in TARGET, if convenient.
3793 Return value is based on RETMODE argument. */
3795 static rtx
3796 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3798 class expand_operand ops[3];
3799 rtx dest_mem;
3800 rtx src_mem;
3802 if (!targetm.have_movstr ())
3803 return NULL_RTX;
3805 dest_mem = get_memory_rtx (dest, NULL);
3806 src_mem = get_memory_rtx (src, NULL);
3807 if (retmode == RETURN_BEGIN)
3809 target = force_reg (Pmode, XEXP (dest_mem, 0));
3810 dest_mem = replace_equiv_address (dest_mem, target);
3813 create_output_operand (&ops[0],
3814 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3815 create_fixed_operand (&ops[1], dest_mem);
3816 create_fixed_operand (&ops[2], src_mem);
3817 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3818 return NULL_RTX;
3820 if (retmode != RETURN_BEGIN && target != const0_rtx)
3822 target = ops[0].value;
3823 /* movstr is supposed to set end to the address of the NUL
3824 terminator. If the caller requested a mempcpy-like return value,
3825 adjust it. */
3826 if (retmode == RETURN_END)
3828 rtx tem = plus_constant (GET_MODE (target),
3829 gen_lowpart (GET_MODE (target), target), 1);
3830 emit_move_insn (target, force_operand (tem, NULL_RTX));
3833 return target;
3836 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3837 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3838 try to get the result in TARGET, if convenient (and in mode MODE if that's
3839 convenient). */
3841 static rtx
3842 expand_builtin_strcpy (tree exp, rtx target)
3844 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3845 return NULL_RTX;
3847 tree dest = CALL_EXPR_ARG (exp, 0);
3848 tree src = CALL_EXPR_ARG (exp, 1);
3850 return expand_builtin_strcpy_args (exp, dest, src, target);
3853 /* Helper function to do the actual work for expand_builtin_strcpy. The
3854 arguments to the builtin_strcpy call DEST and SRC are broken out
3855 so that this can also be called without constructing an actual CALL_EXPR.
3856 The other arguments and return value are the same as for
3857 expand_builtin_strcpy. */
3859 static rtx
3860 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3862 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3865 /* Expand a call EXP to the stpcpy builtin.
3866 Return NULL_RTX if we failed; the caller should emit a normal call,
3867 otherwise try to get the result in TARGET, if convenient (and in
3868 mode MODE if that's convenient). */
3870 static rtx
3871 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3873 tree dst, src;
3874 location_t loc = EXPR_LOCATION (exp);
3876 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3877 return NULL_RTX;
3879 dst = CALL_EXPR_ARG (exp, 0);
3880 src = CALL_EXPR_ARG (exp, 1);
3882 /* If return value is ignored, transform stpcpy into strcpy. */
3883 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3885 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3886 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3887 return expand_expr (result, target, mode, EXPAND_NORMAL);
3889 else
3891 tree len, lenp1;
3892 rtx ret;
3894 /* Ensure we get an actual string whose length can be evaluated at
3895 compile-time, not an expression containing a string. This is
3896 because the latter will potentially produce pessimized code
3897 when used to produce the return value. */
3898 c_strlen_data lendata = { };
3899 if (!c_getstr (src)
3900 || !(len = c_strlen (src, 0, &lendata, 1)))
3901 return expand_movstr (dst, src, target,
3902 /*retmode=*/ RETURN_END_MINUS_ONE);
3904 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3905 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3906 target, exp,
3907 /*retmode=*/ RETURN_END_MINUS_ONE);
3909 if (ret)
3910 return ret;
3912 if (TREE_CODE (len) == INTEGER_CST)
3914 rtx len_rtx = expand_normal (len);
3916 if (CONST_INT_P (len_rtx))
3918 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3920 if (ret)
3922 if (! target)
3924 if (mode != VOIDmode)
3925 target = gen_reg_rtx (mode);
3926 else
3927 target = gen_reg_rtx (GET_MODE (ret));
3929 if (GET_MODE (target) != GET_MODE (ret))
3930 ret = gen_lowpart (GET_MODE (target), ret);
3932 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3933 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3934 gcc_assert (ret);
3936 return target;
3941 return expand_movstr (dst, src, target,
3942 /*retmode=*/ RETURN_END_MINUS_ONE);
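/* Illustration of the transformation performed above (a sketch only):
   when SRC is a known string, say

     char *p = stpcpy (d, "abc");

   the call is expanded as mempcpy (d, "abc", 4) with
   RETURN_END_MINUS_ONE, so P ends up as D + 4 - 1 == D + 3, the address
   of the terminating nul, exactly what stpcpy must return.  */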
3946 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3947 arguments while being careful to avoid duplicate warnings (which could
3948 be issued if the expander were to expand the call, resulting in it
3949 being emitted in expand_call()). */
3951 static rtx
3952 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3954 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3956 /* The call has been successfully expanded. Check for nonstring
3957 arguments and issue warnings as appropriate. */
3958 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3959 return ret;
3962 return NULL_RTX;
3965 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3966 bytes from constant string DATA + OFFSET and return it as target
3967 constant. */
3969 static rtx
3970 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3971 fixed_size_mode mode)
3973 const char *str = (const char *) data;
3975 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3976 return const0_rtx;
3978 return c_readstr (str + offset, mode);
3981 /* Helper to check the sizes of sequences and the destination of calls
3982 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3983 success (no overflow or invalid sizes), false otherwise. */
3985 static bool
3986 check_strncat_sizes (tree exp, tree objsize)
3988 tree dest = CALL_EXPR_ARG (exp, 0);
3989 tree src = CALL_EXPR_ARG (exp, 1);
3990 tree maxread = CALL_EXPR_ARG (exp, 2);
3992 /* Try to determine the range of lengths that the source expression
3993 refers to. */
3994 c_strlen_data lendata = { };
3995 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3997 /* Try to verify that the destination is big enough for the shortest
3998 string. */
4000 access_data data (nullptr, exp, access_read_write, maxread, true);
4001 if (!objsize && warn_stringop_overflow)
4003 /* If it hasn't been provided by __strncat_chk, try to determine
4004 the size of the destination object into which the source is
4005 being copied. */
4006 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4009 /* Add one for the terminating nul. */
4010 tree srclen = (lendata.minlen
4011 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4012 size_one_node)
4013 : NULL_TREE);
4015 /* The strncat function copies at most MAXREAD bytes and always appends
4016 the terminating nul so the specified upper bound should never be equal
4017 to (or greater than) the size of the destination. */
4018 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4019 && tree_int_cst_equal (objsize, maxread))
4021 location_t loc = EXPR_LOCATION (exp);
4022 warning_at (loc, OPT_Wstringop_overflow_,
4023 "%qD specified bound %E equals destination size",
4024 get_callee_fndecl (exp), maxread);
4026 return false;
4029 if (!srclen
4030 || (maxread && tree_fits_uhwi_p (maxread)
4031 && tree_fits_uhwi_p (srclen)
4032 && tree_int_cst_lt (maxread, srclen)))
4033 srclen = maxread;
4035 /* The number of bytes to write is LEN but check_access will also
4036 check SRCLEN if LEN's value isn't known. */
4037 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4038 objsize, data.mode, &data);
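/* Example of the diagnostic issued above (illustrative only):

     char buf[8];
     ...
     strncat (buf, s, sizeof buf);   // warning: specified bound 8 equals
                                     //   destination size

   strncat always appends a terminating nul after copying at most the
   bound's worth of bytes, so a correct bound here would be
   sizeof buf - strlen (buf) - 1.  */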
4041 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4042 NULL_RTX if we failed; the caller should emit a normal call. */
4044 static rtx
4045 expand_builtin_strncpy (tree exp, rtx target)
4047 location_t loc = EXPR_LOCATION (exp);
4049 if (!validate_arglist (exp,
4050 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052 tree dest = CALL_EXPR_ARG (exp, 0);
4053 tree src = CALL_EXPR_ARG (exp, 1);
4054 /* The number of bytes to write (not the maximum). */
4055 tree len = CALL_EXPR_ARG (exp, 2);
4057 /* The length of the source sequence. */
4058 tree slen = c_strlen (src, 1);
4060 /* We must be passed a constant len and src parameter. */
4061 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4062 return NULL_RTX;
4064 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4066 /* We're required to pad with trailing zeros if the requested
4067 len is greater than strlen(s2)+1. In that case try to
4068 use store_by_pieces; if it fails, punt. */
4069 if (tree_int_cst_lt (slen, len))
4071 unsigned int dest_align = get_pointer_alignment (dest);
4072 const char *p = c_getstr (src);
4073 rtx dest_mem;
4075 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4076 || !can_store_by_pieces (tree_to_uhwi (len),
4077 builtin_strncpy_read_str,
4078 CONST_CAST (char *, p),
4079 dest_align, false))
4080 return NULL_RTX;
4082 dest_mem = get_memory_rtx (dest, len);
4083 store_by_pieces (dest_mem, tree_to_uhwi (len),
4084 builtin_strncpy_read_str,
4085 CONST_CAST (char *, p), dest_align, false,
4086 RETURN_BEGIN);
4087 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4088 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4089 return dest_mem;
4092 return NULL_RTX;
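/* Illustration of the padding case handled above (a sketch, not part of
   this file): with a constant source shorter than the count,

     char d[8];
     strncpy (d, "ab", 5);

   must store 'a', 'b', '\0', '\0', '\0'.  Since SLEN + 1 == 3 < 5,
   store_by_pieces replays builtin_strncpy_read_str, which returns the
   string bytes (nul-padded by c_readstr) for offsets up to the
   terminating nul and all-zero pieces past it, producing the trailing
   padding without a library call.  */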
4095 /* Return the RTL of a register in MODE generated from PREV in the
4096 previous iteration. */
4098 static rtx
4099 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4101 rtx target = nullptr;
4102 if (prev != nullptr && prev->data != nullptr)
4104 /* Use the previous data in the same mode. */
4105 if (prev->mode == mode)
4106 return prev->data;
4108 fixed_size_mode prev_mode = prev->mode;
4110 /* Don't use the previous data to write QImode if it is in a
4111 vector mode. */
4112 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4113 return target;
4115 rtx prev_rtx = prev->data;
4117 if (REG_P (prev_rtx)
4118 && HARD_REGISTER_P (prev_rtx)
4119 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4121 /* This case occurs when PREV_MODE is a vector and when
4122 MODE is too small to store using vector operations.
4123 After register allocation, the code will need to move the
4124 lowpart of the vector register into a non-vector register.
4126 Also, the target has chosen to use a hard register
4127 instead of going with the default choice of using a
4128 pseudo register. We should respect that choice and try to
4129 avoid creating a pseudo register with the same mode as the
4130 current hard register.
4132 In principle, we could just use a lowpart MODE subreg of
4133 the vector register. However, the vector register mode might
4134 be too wide for non-vector registers, and we already know
4135 that the non-vector mode is too small for vector registers.
4136 It's therefore likely that we'd need to spill to memory in
4137 the vector mode and reload the non-vector value from there.
4139 Try to avoid that by reducing the vector register to the
4140 smallest size that it can hold. This should increase the
4141 chances that non-vector registers can hold both the inner
4142 and outer modes of the subreg that we generate later. */
4143 machine_mode m;
4144 fixed_size_mode candidate;
4145 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4146 if (is_a<fixed_size_mode> (m, &candidate))
4148 if (GET_MODE_SIZE (candidate)
4149 >= GET_MODE_SIZE (prev_mode))
4150 break;
4151 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4152 && lowpart_subreg_regno (REGNO (prev_rtx),
4153 prev_mode, candidate) >= 0)
4155 target = lowpart_subreg (candidate, prev_rtx,
4156 prev_mode);
4157 prev_rtx = target;
4158 prev_mode = candidate;
4159 break;
4162 if (target == nullptr)
4163 prev_rtx = copy_to_reg (prev_rtx);
4166 target = lowpart_subreg (mode, prev_rtx, prev_mode);
4168 return target;
4171 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4172 bytes from constant string DATA + OFFSET and return it as target
4173 constant. If PREV isn't nullptr, it has the RTL info from the
4174 previous iteration. */
4176 static rtx
4177 builtin_memset_read_str (void *data, void *prev,
4178 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4179 fixed_size_mode mode)
4181 const char *c = (const char *) data;
4182 unsigned int size = GET_MODE_SIZE (mode);
4184 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4185 mode);
4186 if (target != nullptr)
4187 return target;
4188 rtx src = gen_int_mode (*c, QImode);
4190 if (VECTOR_MODE_P (mode))
4192 gcc_assert (GET_MODE_INNER (mode) == QImode);
4194 rtx const_vec = gen_const_vec_duplicate (mode, src);
4195 if (prev == NULL)
4196 /* Return CONST_VECTOR when called by a query function. */
4197 return const_vec;
4199 /* Use the move expander with CONST_VECTOR. */
4200 target = gen_reg_rtx (mode);
4201 emit_move_insn (target, const_vec);
4202 return target;
4205 char *p = XALLOCAVEC (char, size);
4207 memset (p, *c, size);
4209 return c_readstr (p, mode);
4212 /* Callback routine for store_by_pieces. Return the RTL of a register
4213 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4214 char value given in the RTL register data. For example, if mode is
4215 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4216 nullptr, it has the RTL info from the previous iteration. */
4218 static rtx
4219 builtin_memset_gen_str (void *data, void *prev,
4220 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4221 fixed_size_mode mode)
4223 rtx target, coeff;
4224 size_t size;
4225 char *p;
4227 size = GET_MODE_SIZE (mode);
4228 if (size == 1)
4229 return (rtx) data;
4231 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4232 if (target != nullptr)
4233 return target;
4235 if (VECTOR_MODE_P (mode))
4237 gcc_assert (GET_MODE_INNER (mode) == QImode);
4239 /* vec_duplicate_optab is a precondition to pick a vector mode for
4240 the memset expander. */
4241 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4243 target = gen_reg_rtx (mode);
4244 class expand_operand ops[2];
4245 create_output_operand (&ops[0], target, mode);
4246 create_input_operand (&ops[1], (rtx) data, QImode);
4247 expand_insn (icode, 2, ops);
4248 if (!rtx_equal_p (target, ops[0].value))
4249 emit_move_insn (target, ops[0].value);
4251 return target;
4254 p = XALLOCAVEC (char, size);
4255 memset (p, 1, size);
4256 coeff = c_readstr (p, mode);
4258 target = convert_to_mode (mode, (rtx) data, 1);
4259 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4260 return force_reg (mode, target);
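/* Worked example of the replication above (illustrative only): for a
   non-constant fill value V and a 4-byte integer MODE, the non-vector
   path effectively computes

     uint32_t coeff = 0x01010101;               // c_readstr of "\1\1\1\1"
     uint32_t piece = (uint32_t) (uint8_t) v * coeff;

   so V == 0x41 yields 0x41414141, i.e. GET_MODE_SIZE (MODE) copies of
   the byte, which is what memset must store in each piece.  */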
4263 /* Expand expression EXP, which is a call to the memset builtin. Return
4264 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4265 try to get the result in TARGET, if convenient (and in mode MODE if that's
4266 convenient). */
4268 static rtx
4269 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4271 if (!validate_arglist (exp,
4272 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4273 return NULL_RTX;
4275 tree dest = CALL_EXPR_ARG (exp, 0);
4276 tree val = CALL_EXPR_ARG (exp, 1);
4277 tree len = CALL_EXPR_ARG (exp, 2);
4279 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4282 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4283 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4284 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4285 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4287 The strategy is to issue one store_by_pieces for each power of two,
4288 from most to least significant, guarded by a test on whether there
4289 are at least that many bytes left to copy in LEN.
4291 ??? Should we skip some powers of two in favor of loops? Maybe start
4292 at the max of TO/LEN/word alignment, at least when optimizing for
4293 size, instead of ensuring O(log len) dynamic compares? */
4295 bool
4296 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4297 unsigned HOST_WIDE_INT min_len,
4298 unsigned HOST_WIDE_INT max_len,
4299 rtx val, char valc, unsigned int align)
4301 int max_bits = floor_log2 (max_len);
4302 int min_bits = floor_log2 (min_len);
4303 int sctz_len = ctz_len;
4305 gcc_checking_assert (sctz_len >= 0);
4307 if (val)
4308 valc = 1;
4310 /* Bits more significant than TST_BITS are part of the shared prefix
4311 in the binary representation of both min_len and max_len. Since
4312 they're identical, we don't need to test them in the loop. */
4313 int tst_bits = (max_bits != min_bits ? max_bits
4314 : floor_log2 (max_len ^ min_len));
4316 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4317 bytes, to lower max_bits. In the unlikely case of a constant LEN
4318 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4319 single store_by_pieces, but otherwise, select the minimum multiple
4320 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
4321 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4322 unsigned HOST_WIDE_INT blksize;
4323 if (max_len > min_len)
4325 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4326 align / BITS_PER_UNIT);
4327 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4328 blksize &= ~(alrng - 1);
4330 else if (max_len == min_len)
4331 blksize = max_len;
4332 else
4333 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4334 return false;
4335 if (min_len >= blksize)
4337 min_len -= blksize;
4338 min_bits = floor_log2 (min_len);
4339 max_len -= blksize;
4340 max_bits = floor_log2 (max_len);
4342 tst_bits = (max_bits != min_bits ? max_bits
4343 : floor_log2 (max_len ^ min_len));
4345 else
4346 blksize = 0;
4348 /* Check that we can use store by pieces for the maximum store count
4349 we may issue (initial fixed-size block, plus conditional
4350 power-of-two-sized stores from max_bits to ctz_len). */
4351 unsigned HOST_WIDE_INT xlenest = blksize;
4352 if (max_bits >= 0)
4353 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4354 - (HOST_WIDE_INT_1U << ctz_len));
4355 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
4356 &valc, align, true))
4357 return false;
4359 by_pieces_constfn constfun;
4360 void *constfundata;
4361 if (val)
4363 constfun = builtin_memset_gen_str;
4364 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4365 val);
4367 else
4369 constfun = builtin_memset_read_str;
4370 constfundata = &valc;
4373 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4374 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4375 to = replace_equiv_address (to, ptr);
4376 set_mem_align (to, align);
4378 if (blksize)
4380 to = store_by_pieces (to, blksize,
4381 constfun, constfundata,
4382 align, true,
4383 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4384 if (max_len == 0)
4385 return true;
4387 /* Adjust PTR, TO and REM. Since TO's address is likely
4388 PTR+offset, we have to replace it. */
4389 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4390 to = replace_equiv_address (to, ptr);
4391 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4392 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4395 /* Iterate over power-of-two block sizes from the maximum length to
4396 the least significant bit possibly set in the length. */
4397 for (int i = max_bits; i >= sctz_len; i--)
4399 rtx_code_label *label = NULL;
4400 blksize = HOST_WIDE_INT_1U << i;
4402 /* If we're past the bits shared between min_ and max_len, expand
4403 a test on the dynamic length, comparing it with the
4404 BLKSIZE. */
4405 if (i <= tst_bits)
4407 label = gen_label_rtx ();
4408 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4409 ptr_mode, 1, label,
4410 profile_probability::even ());
4412 /* If we are at a bit that is in the prefix shared by min_ and
4413 max_len, skip this BLKSIZE if the bit is clear. */
4414 else if ((max_len & blksize) == 0)
4415 continue;
4417 /* Issue a store of BLKSIZE bytes. */
4418 to = store_by_pieces (to, blksize,
4419 constfun, constfundata,
4420 align, true,
4421 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4423 /* Adjust REM and PTR, unless this is the last iteration. */
4424 if (i != sctz_len)
4426 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4427 to = replace_equiv_address (to, ptr);
4428 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4429 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4432 if (label)
4434 emit_label (label);
4436 /* Given conditional stores, the offset can no longer be
4437 known, so clear it. */
4438 clear_mem_offset (to);
4442 return true;
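/* Sketch of the code emitted by the function above (illustrative only):
   for a length known to be a multiple of 8 in [8, 64) stored to an
   8-byte-aligned destination, the expansion behaves roughly like

     if (rem >= 32) { store 32 bytes; to += 32; rem -= 32; }
     if (rem >= 16) { store 16 bytes; to += 16; rem -= 16; }
     if (rem >=  8) { store  8 bytes; }

   i.e. one guarded store_by_pieces per power of two from MAX_BITS down
   to CTZ_LEN, with the guards omitted for bits that are shared by
   MIN_LEN and MAX_LEN and so need no test.  */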
4445 /* Helper function to do the actual work for expand_builtin_memset. The
4446 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4447 so that this can also be called without constructing an actual CALL_EXPR.
4448 The other arguments and return value are the same as for
4449 expand_builtin_memset. */
4451 static rtx
4452 expand_builtin_memset_args (tree dest, tree val, tree len,
4453 rtx target, machine_mode mode, tree orig_exp)
4455 tree fndecl, fn;
4456 enum built_in_function fcode;
4457 machine_mode val_mode;
4458 char c;
4459 unsigned int dest_align;
4460 rtx dest_mem, dest_addr, len_rtx;
4461 HOST_WIDE_INT expected_size = -1;
4462 unsigned int expected_align = 0;
4463 unsigned HOST_WIDE_INT min_size;
4464 unsigned HOST_WIDE_INT max_size;
4465 unsigned HOST_WIDE_INT probable_max_size;
4467 dest_align = get_pointer_alignment (dest);
4469 /* If DEST is not a pointer type, don't do this operation in-line. */
4470 if (dest_align == 0)
4471 return NULL_RTX;
4473 if (currently_expanding_gimple_stmt)
4474 stringop_block_profile (currently_expanding_gimple_stmt,
4475 &expected_align, &expected_size);
4477 if (expected_align < dest_align)
4478 expected_align = dest_align;
4480 /* If the LEN parameter is zero, return DEST. */
4481 if (integer_zerop (len))
4483 /* Evaluate and ignore VAL in case it has side-effects. */
4484 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4485 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4488 /* Stabilize the arguments in case we fail. */
4489 dest = builtin_save_expr (dest);
4490 val = builtin_save_expr (val);
4491 len = builtin_save_expr (len);
4493 len_rtx = expand_normal (len);
4494 determine_block_size (len, len_rtx, &min_size, &max_size,
4495 &probable_max_size);
4496 dest_mem = get_memory_rtx (dest, len);
4497 val_mode = TYPE_MODE (unsigned_char_type_node);
4499 if (TREE_CODE (val) != INTEGER_CST
4500 || target_char_cast (val, &c))
4502 rtx val_rtx;
4504 val_rtx = expand_normal (val);
4505 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4507 /* Assume that we can memset by pieces if we can store
4508 the coefficients by pieces (in the required modes).
4509 We can't pass builtin_memset_gen_str as that emits RTL. */
4510 c = 1;
4511 if (tree_fits_uhwi_p (len)
4512 && can_store_by_pieces (tree_to_uhwi (len),
4513 builtin_memset_read_str, &c, dest_align,
4514 true))
4516 val_rtx = force_reg (val_mode, val_rtx);
4517 store_by_pieces (dest_mem, tree_to_uhwi (len),
4518 builtin_memset_gen_str, val_rtx, dest_align,
4519 true, RETURN_BEGIN);
4521 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4522 dest_align, expected_align,
4523 expected_size, min_size, max_size,
4524 probable_max_size)
4525 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4526 tree_ctz (len),
4527 min_size, max_size,
4528 val_rtx, 0,
4529 dest_align))
4530 goto do_libcall;
4532 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4533 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4534 return dest_mem;
4537 if (c)
4539 if (tree_fits_uhwi_p (len)
4540 && can_store_by_pieces (tree_to_uhwi (len),
4541 builtin_memset_read_str, &c, dest_align,
4542 true))
4543 store_by_pieces (dest_mem, tree_to_uhwi (len),
4544 builtin_memset_read_str, &c, dest_align, true,
4545 RETURN_BEGIN);
4546 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4547 gen_int_mode (c, val_mode),
4548 dest_align, expected_align,
4549 expected_size, min_size, max_size,
4550 probable_max_size)
4551 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4552 tree_ctz (len),
4553 min_size, max_size,
4554 NULL_RTX, c,
4555 dest_align))
4556 goto do_libcall;
4558 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4559 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4560 return dest_mem;
4563 set_mem_align (dest_mem, dest_align);
4564 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4565 CALL_EXPR_TAILCALL (orig_exp)
4566 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4567 expected_align, expected_size,
4568 min_size, max_size,
4569 probable_max_size, tree_ctz (len));
4571 if (dest_addr == 0)
4573 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4574 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4577 return dest_addr;
4579 do_libcall:
4580 fndecl = get_callee_fndecl (orig_exp);
4581 fcode = DECL_FUNCTION_CODE (fndecl);
4582 if (fcode == BUILT_IN_MEMSET)
4583 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4584 dest, val, len);
4585 else if (fcode == BUILT_IN_BZERO)
4586 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4587 dest, len);
4588 else
4589 gcc_unreachable ();
4590 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4591 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4592 return expand_call (fn, target, target == const0_rtx);
4595 /* Expand expression EXP, which is a call to the bzero builtin. Return
4596 NULL_RTX if we failed; the caller should emit a normal call. */
4598 static rtx
4599 expand_builtin_bzero (tree exp)
4601 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4602 return NULL_RTX;
4604 tree dest = CALL_EXPR_ARG (exp, 0);
4605 tree size = CALL_EXPR_ARG (exp, 1);
4607 /* New argument list transforming bzero(ptr x, int y) to
4608 memset(ptr x, int 0, size_t y). This is done this way
4609 so that if it isn't expanded inline, we fall back to
4610 calling bzero instead of memset. */
4612 location_t loc = EXPR_LOCATION (exp);
4614 return expand_builtin_memset_args (dest, integer_zero_node,
4615 fold_convert_loc (loc,
4616 size_type_node, size),
4617 const0_rtx, VOIDmode, exp);
4620 /* Try to expand cmpstr operation ICODE with the given operands.
4621 Return the result rtx on success, otherwise return null. */
4623 static rtx
4624 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4625 HOST_WIDE_INT align)
4627 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4629 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4630 target = NULL_RTX;
4632 class expand_operand ops[4];
4633 create_output_operand (&ops[0], target, insn_mode);
4634 create_fixed_operand (&ops[1], arg1_rtx);
4635 create_fixed_operand (&ops[2], arg2_rtx);
4636 create_integer_operand (&ops[3], align);
4637 if (maybe_expand_insn (icode, 4, ops))
4638 return ops[0].value;
4639 return NULL_RTX;
4642 /* Expand expression EXP, which is a call to the memcmp built-in function.
4643 Return NULL_RTX if we failed and the caller should emit a normal call,
4644 otherwise try to get the result in TARGET, if convenient.
4645 RESULT_EQ is true if we can relax the returned value to be either zero
4646 or nonzero, without caring about the sign. */
4648 static rtx
4649 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4651 if (!validate_arglist (exp,
4652 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4653 return NULL_RTX;
4655 tree arg1 = CALL_EXPR_ARG (exp, 0);
4656 tree arg2 = CALL_EXPR_ARG (exp, 1);
4657 tree len = CALL_EXPR_ARG (exp, 2);
4659 /* Due to the performance benefit, always inline the calls first
4660 when result_eq is false. */
4661 rtx result = NULL_RTX;
4662 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4663 if (!result_eq && fcode != BUILT_IN_BCMP)
4665 result = inline_expand_builtin_bytecmp (exp, target);
4666 if (result)
4667 return result;
4670 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4671 location_t loc = EXPR_LOCATION (exp);
4673 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4674 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4676 /* If we don't have POINTER_TYPE, call the function. */
4677 if (arg1_align == 0 || arg2_align == 0)
4678 return NULL_RTX;
4680 rtx arg1_rtx = get_memory_rtx (arg1, len);
4681 rtx arg2_rtx = get_memory_rtx (arg2, len);
4682 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4684 /* Set MEM_SIZE as appropriate. */
4685 if (CONST_INT_P (len_rtx))
4687 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4688 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4691 by_pieces_constfn constfn = NULL;
4693 /* Try to get the byte representation of the constant ARG2 (or, only
4694 when the function's result is used for equality to zero, ARG1)
4695 points to, with its byte size in NBYTES. */
4696 unsigned HOST_WIDE_INT nbytes;
4697 const char *rep = getbyterep (arg2, &nbytes);
4698 if (result_eq && rep == NULL)
4700 /* For equality to zero the arguments are interchangeable. */
4701 rep = getbyterep (arg1, &nbytes);
4702 if (rep != NULL)
4703 std::swap (arg1_rtx, arg2_rtx);
4706 /* If the function's constant bound LEN_RTX is less than or equal
4707 to the byte size of the representation of the constant argument,
4708 and if block move would be done by pieces, we can avoid loading
4709 the bytes from memory and only store the computed constant result. */
4710 if (rep
4711 && CONST_INT_P (len_rtx)
4712 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4713 constfn = builtin_memcpy_read_str;
4715 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4716 TREE_TYPE (len), target,
4717 result_eq, constfn,
4718 CONST_CAST (char *, rep));
4720 if (result)
4722 /* Return the value in the proper mode for this function. */
4723 if (GET_MODE (result) == mode)
4724 return result;
4726 if (target != 0)
4728 convert_move (target, result, 0);
4729 return target;
4732 return convert_to_mode (mode, result, 0);
4735 return NULL_RTX;
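/* Examples of the two shortcuts above (illustrative only):

     r = memcmp (p, "abc", 3);        // ARG2's bytes are known, so the
                                      //   comparison can be emitted by pieces
                                      //   without loading from the literal
     if (memcmp (p, q, n) == 0) ...   // RESULT_EQ: only zero/nonzero matters,
                                      //   so ARG1 and ARG2 are interchangeable
                                      //   and whichever side is constant is used

   When neither the inline bytewise expansion nor emit_block_cmp_hints
   succeeds, NULL_RTX is returned and a library call is emitted.  */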
4738 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4739 if we failed; the caller should emit a normal call, otherwise try to get
4740 the result in TARGET, if convenient. */
4742 static rtx
4743 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4746 return NULL_RTX;
4748 tree arg1 = CALL_EXPR_ARG (exp, 0);
4749 tree arg2 = CALL_EXPR_ARG (exp, 1);
4751 /* Due to the performance benefit, always inline the calls first. */
4752 rtx result = NULL_RTX;
4753 result = inline_expand_builtin_bytecmp (exp, target);
4754 if (result)
4755 return result;
4757 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4758 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4759 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4760 return NULL_RTX;
4762 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4763 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4765 /* If we don't have POINTER_TYPE, call the function. */
4766 if (arg1_align == 0 || arg2_align == 0)
4767 return NULL_RTX;
4769 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4770 arg1 = builtin_save_expr (arg1);
4771 arg2 = builtin_save_expr (arg2);
4773 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4774 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4776 /* Try to call cmpstrsi. */
4777 if (cmpstr_icode != CODE_FOR_nothing)
4778 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4779 MIN (arg1_align, arg2_align));
4781 /* Try to determine at least one length and call cmpstrnsi. */
4782 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4784 tree len;
4785 rtx arg3_rtx;
4787 tree len1 = c_strlen (arg1, 1);
4788 tree len2 = c_strlen (arg2, 1);
4790 if (len1)
4791 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4792 if (len2)
4793 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4795 /* If we don't have a constant length for the first, use the length
4796 of the second, if we know it. We don't require a constant for
4797 this case; some cost analysis could be done if both are available
4798 but neither is constant. For now, assume they're equally cheap,
4799 unless one has side effects. If both strings have constant lengths,
4800 use the smaller. */
4802 if (!len1)
4803 len = len2;
4804 else if (!len2)
4805 len = len1;
4806 else if (TREE_SIDE_EFFECTS (len1))
4807 len = len2;
4808 else if (TREE_SIDE_EFFECTS (len2))
4809 len = len1;
4810 else if (TREE_CODE (len1) != INTEGER_CST)
4811 len = len2;
4812 else if (TREE_CODE (len2) != INTEGER_CST)
4813 len = len1;
4814 else if (tree_int_cst_lt (len1, len2))
4815 len = len1;
4816 else
4817 len = len2;
4819 /* If both arguments have side effects, we cannot optimize. */
4820 if (len && !TREE_SIDE_EFFECTS (len))
4822 arg3_rtx = expand_normal (len);
4823 result = expand_cmpstrn_or_cmpmem
4824 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4825 arg3_rtx, MIN (arg1_align, arg2_align));
4829 tree fndecl = get_callee_fndecl (exp);
4830 if (result)
4832 /* Return the value in the proper mode for this function. */
4833 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4834 if (GET_MODE (result) == mode)
4835 return result;
4836 if (target == 0)
4837 return convert_to_mode (mode, result, 0);
4838 convert_move (target, result, 0);
4839 return target;
4842 /* Expand the library call ourselves using a stabilized argument
4843 list to avoid re-evaluating the function's arguments twice. */
4844 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4845 copy_warning (fn, exp);
4846 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4847 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4848 return expand_call (fn, target, target == const0_rtx);
4851 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4852 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4853 try to get the result in TARGET, if convenient. */
4855 static rtx
4856 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4857 ATTRIBUTE_UNUSED machine_mode mode)
4859 if (!validate_arglist (exp,
4860 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4861 return NULL_RTX;
4863 tree arg1 = CALL_EXPR_ARG (exp, 0);
4864 tree arg2 = CALL_EXPR_ARG (exp, 1);
4865 tree arg3 = CALL_EXPR_ARG (exp, 2);
4867 location_t loc = EXPR_LOCATION (exp);
4868 tree len1 = c_strlen (arg1, 1);
4869 tree len2 = c_strlen (arg2, 1);
4871 /* Due to the performance benefit, always inline the calls first. */
4872 rtx result = NULL_RTX;
4873 result = inline_expand_builtin_bytecmp (exp, target);
4874 if (result)
4875 return result;
4877 /* If c_strlen can determine an expression for one of the string
4878 lengths, and it doesn't have side effects, then emit cmpstrnsi
4879 using length MIN(strlen(string)+1, arg3). */
4880 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4881 if (cmpstrn_icode == CODE_FOR_nothing)
4882 return NULL_RTX;
4884 tree len;
4886 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4887 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4889 if (len1)
4890 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4891 if (len2)
4892 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4894 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4896 /* If we don't have a constant length for the first, use the length
4897 of the second, if we know it. If neither string is constant length,
4898 use the given length argument. We don't require a constant for
4899 this case; some cost analysis could be done if both are available
4900 but neither is constant. For now, assume they're equally cheap,
4901 unless one has side effects. If both strings have constant lengths,
4902 use the smaller. */
4904 if (!len1 && !len2)
4905 len = len3;
4906 else if (!len1)
4907 len = len2;
4908 else if (!len2)
4909 len = len1;
4910 else if (TREE_SIDE_EFFECTS (len1))
4911 len = len2;
4912 else if (TREE_SIDE_EFFECTS (len2))
4913 len = len1;
4914 else if (TREE_CODE (len1) != INTEGER_CST)
4915 len = len2;
4916 else if (TREE_CODE (len2) != INTEGER_CST)
4917 len = len1;
4918 else if (tree_int_cst_lt (len1, len2))
4919 len = len1;
4920 else
4921 len = len2;
4923 /* If we are not using the given length, we must incorporate it here.
4924 The actual new length parameter will be MIN(len,arg3) in this case. */
4925 if (len != len3)
4927 len = fold_convert_loc (loc, sizetype, len);
4928 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4930 rtx arg1_rtx = get_memory_rtx (arg1, len);
4931 rtx arg2_rtx = get_memory_rtx (arg2, len);
4932 rtx arg3_rtx = expand_normal (len);
4933 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4934 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4935 MIN (arg1_align, arg2_align));
4937 tree fndecl = get_callee_fndecl (exp);
4938 if (result)
4940 /* Return the value in the proper mode for this function. */
4941 mode = TYPE_MODE (TREE_TYPE (exp));
4942 if (GET_MODE (result) == mode)
4943 return result;
4944 if (target == 0)
4945 return convert_to_mode (mode, result, 0);
4946 convert_move (target, result, 0);
4947 return target;
4950 /* Expand the library call ourselves using a stabilized argument
4951 list to avoid re-evaluating the function's arguments twice. */
4952 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4953 copy_warning (call, exp);
4954 gcc_assert (TREE_CODE (call) == CALL_EXPR);
4955 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4956 return expand_call (call, target, target == const0_rtx);
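/* Illustration of the length selection above (a sketch, not part of this
   file): for

     r = strncmp (s, "hello", n);

   LEN2 is strlen ("hello") + 1 == 6, so the cmpstrn expansion is given
   the bound MIN (6, n); comparing past the literal's terminating nul can
   never change the result, so the smaller bound is safe.  */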
4959 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4960 if that's convenient. */
4962 rtx
4963 expand_builtin_saveregs (void)
4965 rtx val;
4966 rtx_insn *seq;
4968 /* Don't do __builtin_saveregs more than once in a function.
4969 Save the result of the first call and reuse it. */
4970 if (saveregs_value != 0)
4971 return saveregs_value;
4973 /* When this function is called, it means that registers must be
4974 saved on entry to this function. So we migrate the call to the
4975 first insn of this function. */
4977 start_sequence ();
4979 /* Do whatever the machine needs done in this case. */
4980 val = targetm.calls.expand_builtin_saveregs ();
4982 seq = get_insns ();
4983 end_sequence ();
4985 saveregs_value = val;
4987 /* Put the insns after the NOTE that starts the function. If this
4988 is inside a start_sequence, make the outer-level insn chain current, so
4989 the code is placed at the start of the function. */
4990 push_topmost_sequence ();
4991 emit_insn_after (seq, entry_of_function ());
4992 pop_topmost_sequence ();
4994 return val;
4997 /* Expand a call to __builtin_next_arg. */
4999 static rtx
5000 expand_builtin_next_arg (void)
5002 /* Checking arguments is already done in fold_builtin_next_arg
5003 that must be called before this function. */
5004 return expand_binop (ptr_mode, add_optab,
5005 crtl->args.internal_arg_pointer,
5006 crtl->args.arg_offset_rtx,
5007 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5010 /* Make it easier for the backends by protecting the valist argument
5011 from multiple evaluations. */
5013 static tree
5014 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5016 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5018 /* The current way of determining the type of valist is completely
5019 bogus. We should have the information on the va builtin instead. */
5020 if (!vatype)
5021 vatype = targetm.fn_abi_va_list (cfun->decl);
5023 if (TREE_CODE (vatype) == ARRAY_TYPE)
5025 if (TREE_SIDE_EFFECTS (valist))
5026 valist = save_expr (valist);
5028 /* For this case, the backends will be expecting a pointer to
5029 vatype, but it's possible we've actually been given an array
5030 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5031 So fix it. */
5032 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5034 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5035 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5038 else
5040 tree pt = build_pointer_type (vatype);
5042 if (! needs_lvalue)
5044 if (! TREE_SIDE_EFFECTS (valist))
5045 return valist;
5047 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5048 TREE_SIDE_EFFECTS (valist) = 1;
5051 if (TREE_SIDE_EFFECTS (valist))
5052 valist = save_expr (valist);
5053 valist = fold_build2_loc (loc, MEM_REF,
5054 vatype, valist, build_int_cst (pt, 0));
5057 return valist;
5060 /* The "standard" definition of va_list is void*. */
5062 tree
5063 std_build_builtin_va_list (void)
5065 return ptr_type_node;
5068 /* The "standard" abi va_list is va_list_type_node. */
5070 tree
5071 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5073 return va_list_type_node;
5076 /* The "standard" type of va_list is va_list_type_node. */
5078 tree
5079 std_canonical_va_list_type (tree type)
5081 tree wtype, htype;
5083 wtype = va_list_type_node;
5084 htype = type;
5086 if (TREE_CODE (wtype) == ARRAY_TYPE)
5088 /* If va_list is an array type, the argument may have decayed
5089 to a pointer type, e.g. by being passed to another function.
5090 In that case, unwrap both types so that we can compare the
5091 underlying records. */
5092 if (TREE_CODE (htype) == ARRAY_TYPE
5093 || POINTER_TYPE_P (htype))
5095 wtype = TREE_TYPE (wtype);
5096 htype = TREE_TYPE (htype);
5099 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5100 return va_list_type_node;
5102 return NULL_TREE;
5105 /* The "standard" implementation of va_start: just assign `nextarg' to
5106 the variable. */
5108 void
5109 std_expand_builtin_va_start (tree valist, rtx nextarg)
5111 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5112 convert_move (va_r, nextarg, 0);
5115 /* Expand EXP, a call to __builtin_va_start. */
5117 static rtx
5118 expand_builtin_va_start (tree exp)
5120 rtx nextarg;
5121 tree valist;
5122 location_t loc = EXPR_LOCATION (exp);
5124 if (call_expr_nargs (exp) < 2)
5126 error_at (loc, "too few arguments to function %<va_start%>");
5127 return const0_rtx;
5130 if (fold_builtin_next_arg (exp, true))
5131 return const0_rtx;
5133 nextarg = expand_builtin_next_arg ();
5134 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5136 if (targetm.expand_builtin_va_start)
5137 targetm.expand_builtin_va_start (valist, nextarg);
5138 else
5139 std_expand_builtin_va_start (valist, nextarg);
5141 return const0_rtx;
5144 /* Expand EXP, a call to __builtin_va_end. */
5146 static rtx
5147 expand_builtin_va_end (tree exp)
5149 tree valist = CALL_EXPR_ARG (exp, 0);
5151 /* Evaluate for side effects, if needed. I hate macros that don't
5152 do that. */
5153 if (TREE_SIDE_EFFECTS (valist))
5154 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5156 return const0_rtx;
5159 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5160 builtin rather than just as an assignment in stdarg.h because of the
5161 nastiness of array-type va_list types. */
5163 static rtx
5164 expand_builtin_va_copy (tree exp)
5166 tree dst, src, t;
5167 location_t loc = EXPR_LOCATION (exp);
5169 dst = CALL_EXPR_ARG (exp, 0);
5170 src = CALL_EXPR_ARG (exp, 1);
5172 dst = stabilize_va_list_loc (loc, dst, 1);
5173 src = stabilize_va_list_loc (loc, src, 0);
5175 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5177 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5179 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5180 TREE_SIDE_EFFECTS (t) = 1;
5181 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5183 else
5185 rtx dstb, srcb, size;
5187 /* Evaluate to pointers. */
5188 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5189 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5190 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5191 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5193 dstb = convert_memory_address (Pmode, dstb);
5194 srcb = convert_memory_address (Pmode, srcb);
5196 /* "Dereference" to BLKmode memories. */
5197 dstb = gen_rtx_MEM (BLKmode, dstb);
5198 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5199 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5200 srcb = gen_rtx_MEM (BLKmode, srcb);
5201 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5202 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5204 /* Copy. */
5205 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5208 return const0_rtx;
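/* Why va_copy is a builtin rather than a plain assignment (illustrative
   sketch): on targets whose ABI va_list is an array type, e.g. a
   one-element array of a record, the copy must be a block copy:

     va_list dst, src;
     ...
     va_copy (dst, src);   // expands roughly to
                           //   memcpy (&dst[0], &src[0], sizeof (dst));

   whereas for a pointer-style va_list the MODIFY_EXPR branch above is a
   simple scalar assignment.  */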
5211 /* Expand a call to one of the builtin functions __builtin_frame_address or
5212 __builtin_return_address. */
5214 static rtx
5215 expand_builtin_frame_address (tree fndecl, tree exp)
5217 /* The argument must be a nonnegative integer constant.
5218 It counts the number of frames to scan up the stack.
5219 The value is either the frame pointer value or the return
5220 address saved in that frame. */
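/* For example, __builtin_return_address (0) yields the return address of
   the current frame, whereas any nonzero count asks for a caller's frame
   and triggers the -Wframe-address warning below, since such frames may
   not be safely reachable.  */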
5221 if (call_expr_nargs (exp) == 0)
5222 /* Warning about missing arg was already issued. */
5223 return const0_rtx;
5224 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5226 error ("invalid argument to %qD", fndecl);
5227 return const0_rtx;
5229 else
5231 /* Number of frames to scan up the stack. */
5232 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5234 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5236 /* Some ports cannot access arbitrary stack frames. */
5237 if (tem == NULL)
5239 warning (0, "unsupported argument to %qD", fndecl);
5240 return const0_rtx;
5243 if (count)
5245 /* Warn since no effort is made to ensure that any frame
5246 beyond the current one exists or can be safely reached. */
5247 warning (OPT_Wframe_address, "calling %qD with "
5248 "a nonzero argument is unsafe", fndecl);
5251 /* For __builtin_frame_address, return what we've got. */
5252 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5253 return tem;
5255 if (!REG_P (tem)
5256 && ! CONSTANT_P (tem))
5257 tem = copy_addr_to_reg (tem);
5258 return tem;
5262 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5263 failed and the caller should emit a normal call. */
5265 static rtx
5266 expand_builtin_alloca (tree exp)
5268 rtx op0;
5269 rtx result;
5270 unsigned int align;
5271 tree fndecl = get_callee_fndecl (exp);
5272 HOST_WIDE_INT max_size;
5273 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5274 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5275 bool valid_arglist
5276 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5277 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5278 VOID_TYPE)
5279 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5280 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5281 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5283 if (!valid_arglist)
5284 return NULL_RTX;
5286 /* Compute the argument. */
5287 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5289 /* Compute the alignment. */
5290 align = (fcode == BUILT_IN_ALLOCA
5291 ? BIGGEST_ALIGNMENT
5292 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5294 /* Compute the maximum size. */
5295 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5296 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5297 : -1);
5299 /* Allocate the desired space. If the allocation stems from the declaration
5300 of a variable-sized object, it cannot accumulate. */
5301 result
5302 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5303 result = convert_memory_address (ptr_mode, result);
5305 /* Dynamic allocations for variables are recorded during gimplification. */
5306 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5307 record_dynamic_alloc (exp);
5309 return result;
5312 /* Emit a call to __asan_allocas_unpoison for EXP.  Add
5313 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5314 STACK_DYNAMIC_OFFSET value, to the second argument of the call.  See the
5315 motivation for this in the comment for handle_builtin_stack_restore. */
5317 static rtx
5318 expand_asan_emit_allocas_unpoison (tree exp)
5320 tree arg0 = CALL_EXPR_ARG (exp, 0);
5321 tree arg1 = CALL_EXPR_ARG (exp, 1);
5322 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5323 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5324 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5325 stack_pointer_rtx, NULL_RTX, 0,
5326 OPTAB_LIB_WIDEN);
5327 off = convert_modes (ptr_mode, Pmode, off, 0);
5328 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5329 OPTAB_LIB_WIDEN);
5330 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5331 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5332 top, ptr_mode, bot, ptr_mode);
5333 return ret;
5336 /* Expand a call to bswap builtin in EXP.
5337 Return NULL_RTX if a normal call should be emitted rather than expanding the
5338 function in-line. If convenient, the result should be placed in TARGET.
5339 SUBTARGET may be used as the target for computing one of EXP's operands. */
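/* For instance, __builtin_bswap32 (0x12345678) yields 0x78563412; the
   expansion goes through bswap_optab, which is typically a single
   byte-swap instruction on targets that provide one.  */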
5341 static rtx
5342 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5343 rtx subtarget)
5345 tree arg;
5346 rtx op0;
5348 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5349 return NULL_RTX;
5351 arg = CALL_EXPR_ARG (exp, 0);
5352 op0 = expand_expr (arg,
5353 subtarget && GET_MODE (subtarget) == target_mode
5354 ? subtarget : NULL_RTX,
5355 target_mode, EXPAND_NORMAL);
5356 if (GET_MODE (op0) != target_mode)
5357 op0 = convert_to_mode (target_mode, op0, 1);
5359 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5361 gcc_assert (target);
5363 return convert_to_mode (target_mode, target, 1);
5366 /* Expand a call to a unary builtin in EXP.
5367 Return NULL_RTX if a normal call should be emitted rather than expanding the
5368 function in-line. If convenient, the result should be placed in TARGET.
5369 SUBTARGET may be used as the target for computing one of EXP's operands. */
5371 static rtx
5372 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5373 rtx subtarget, optab op_optab)
5375 rtx op0;
5377 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5378 return NULL_RTX;
5380 /* Compute the argument. */
5381 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5382 (subtarget
5383 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5384 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5385 VOIDmode, EXPAND_NORMAL);
5386 /* Compute op, into TARGET if possible.
5387 Set TARGET to wherever the result comes back. */
5388 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5389 op_optab, op0, target, op_optab != clrsb_optab);
5390 gcc_assert (target);
5392 return convert_to_mode (target_mode, target, 0);
5395 /* Expand a call to __builtin_expect.  We just return our argument,
5396 as the builtin_expect semantics should already have been applied by
5397 the tree branch prediction pass. */
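/* For instance, in

     if (__builtin_expect (ptr == NULL, 0)) ...

   the "unlikely" hint has already been turned into edge probabilities by
   the predict pass, so all that is left to do here is expand PTR == NULL.  */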
5399 static rtx
5400 expand_builtin_expect (tree exp, rtx target)
5402 tree arg;
5404 if (call_expr_nargs (exp) < 2)
5405 return const0_rtx;
5406 arg = CALL_EXPR_ARG (exp, 0);
5408 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5409 /* When guessing was done, the hints should already have been stripped away. */
5410 gcc_assert (!flag_guess_branch_prob
5411 || optimize == 0 || seen_error ());
5412 return target;
5415 /* Expand a call to __builtin_expect_with_probability.  We just return our
5416 argument, as the builtin_expect semantics should already have been applied
5417 by the tree branch prediction pass. */
5419 static rtx
5420 expand_builtin_expect_with_probability (tree exp, rtx target)
5422 tree arg;
5424 if (call_expr_nargs (exp) < 3)
5425 return const0_rtx;
5426 arg = CALL_EXPR_ARG (exp, 0);
5428 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5429 /* When guessing was done, the hints should already have been stripped away. */
5430 gcc_assert (!flag_guess_branch_prob
5431 || optimize == 0 || seen_error ());
5432 return target;
5436 /* Expand a call to __builtin_assume_aligned.  We just return our first
5437 argument, as the builtin_assume_aligned semantics should already have
5438 been applied by CCP. */
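/* For instance, for

     q = __builtin_assume_aligned (p, 64);

   CCP has already recorded the 64-byte alignment on the SSA name for Q,
   so at expansion time we simply return the value of P.  */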
5440 static rtx
5441 expand_builtin_assume_aligned (tree exp, rtx target)
5443 if (call_expr_nargs (exp) < 2)
5444 return const0_rtx;
5445 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5446 EXPAND_NORMAL);
5447 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5448 && (call_expr_nargs (exp) < 3
5449 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5450 return target;
5453 void
5454 expand_builtin_trap (void)
5456 if (targetm.have_trap ())
5458 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5459 /* For trap insns when not accumulating outgoing args force
5460 REG_ARGS_SIZE note to prevent crossjumping of calls with
5461 different args sizes. */
5462 if (!ACCUMULATE_OUTGOING_ARGS)
5463 add_args_size_note (insn, stack_pointer_delta);
5465 else
5467 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5468 tree call_expr = build_call_expr (fn, 0);
5469 expand_call (call_expr, NULL_RTX, false);
5472 emit_barrier ();
5475 /* Expand a call to __builtin_unreachable. We do nothing except emit
5476 a barrier saying that control flow will not pass here.
5478 It is the responsibility of the program being compiled to ensure
5479 that control flow never reaches __builtin_unreachable. */
5480 static void
5481 expand_builtin_unreachable (void)
5483 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5484 to avoid this. */
5485 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5486 emit_barrier ();
5489 /* Expand EXP, a call to fabs, fabsf or fabsl.
5490 Return NULL_RTX if a normal call should be emitted rather than expanding
5491 the function inline. If convenient, the result should be placed
5492 in TARGET. SUBTARGET may be used as the target for computing
5493 the operand. */
5495 static rtx
5496 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5498 machine_mode mode;
5499 tree arg;
5500 rtx op0;
5502 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5503 return NULL_RTX;
5505 arg = CALL_EXPR_ARG (exp, 0);
5506 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5507 mode = TYPE_MODE (TREE_TYPE (arg));
5508 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5509 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5512 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5513 Return NULL if a normal call should be emitted rather than expanding the
5514 function inline. If convenient, the result should be placed in TARGET.
5515 SUBTARGET may be used as the target for computing the operand. */
5517 static rtx
5518 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5520 rtx op0, op1;
5521 tree arg;
5523 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5524 return NULL_RTX;
5526 arg = CALL_EXPR_ARG (exp, 0);
5527 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5529 arg = CALL_EXPR_ARG (exp, 1);
5530 op1 = expand_normal (arg);
5532 return expand_copysign (op0, op1, target);
5535 /* Emit a call to __builtin___clear_cache. */
5537 void
5538 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5540 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5541 BUILTIN_ASM_NAME_PTR
5542 (BUILT_IN_CLEAR_CACHE));
5544 emit_library_call (callee,
5545 LCT_NORMAL, VOIDmode,
5546 convert_memory_address (ptr_mode, begin), ptr_mode,
5547 convert_memory_address (ptr_mode, end), ptr_mode);
5550 /* Emit a call to __builtin___clear_cache, unless the target specifies
5551 it as do-nothing. This function can be used by trampoline
5552 finalizers to duplicate the effects of expanding a call to the
5553 clear_cache builtin. */
5555 void
5556 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5558 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5559 || CONST_INT_P (begin))
5560 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5561 || CONST_INT_P (end)));
5563 if (targetm.have_clear_cache ())
5565 /* We have a "clear_cache" insn, and it will handle everything. */
5566 class expand_operand ops[2];
5568 create_address_operand (&ops[0], begin);
5569 create_address_operand (&ops[1], end);
5571 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5572 return;
5574 else
5576 #ifndef CLEAR_INSN_CACHE
5577 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5578 does nothing. There is no need to call it. Do nothing. */
5579 return;
5580 #endif /* CLEAR_INSN_CACHE */
5583 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5586 /* Expand a call to __builtin___clear_cache. */
5588 static void
5589 expand_builtin___clear_cache (tree exp)
5591 tree begin, end;
5592 rtx begin_rtx, end_rtx;
5594 /* We must not expand to a library call. If we did, any
5595 fallback library function in libgcc that might contain a call to
5596 __builtin___clear_cache() would recurse infinitely. */
5597 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5599 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5600 return;
5603 begin = CALL_EXPR_ARG (exp, 0);
5604 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5606 end = CALL_EXPR_ARG (exp, 1);
5607 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5609 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5612 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5614 static rtx
5615 round_trampoline_addr (rtx tramp)
5617 rtx temp, addend, mask;
5619 /* If we don't need too much alignment, we'll have been guaranteed
5620 proper alignment by get_trampoline_type. */
5621 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5622 return tramp;
5624 /* Round address up to desired boundary. */
5625 temp = gen_reg_rtx (Pmode);
5626 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5627 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5629 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5630 temp, 0, OPTAB_LIB_WIDEN);
5631 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5632 temp, 0, OPTAB_LIB_WIDEN);
5634 return tramp;
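/* The sequence above computes (TRAMP + ALIGN - 1) & -ALIGN with
   ALIGN = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; e.g. with ALIGN == 8 an
   address of 0x1003 is rounded up to 0x1008.  */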
5637 static rtx
5638 expand_builtin_init_trampoline (tree exp, bool onstack)
5640 tree t_tramp, t_func, t_chain;
5641 rtx m_tramp, r_tramp, r_chain, tmp;
5643 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5644 POINTER_TYPE, VOID_TYPE))
5645 return NULL_RTX;
5647 t_tramp = CALL_EXPR_ARG (exp, 0);
5648 t_func = CALL_EXPR_ARG (exp, 1);
5649 t_chain = CALL_EXPR_ARG (exp, 2);
5651 r_tramp = expand_normal (t_tramp);
5652 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5653 MEM_NOTRAP_P (m_tramp) = 1;
5655 /* If ONSTACK, the TRAMP argument should be the address of a field
5656 within the local function's FRAME decl. Either way, let's see if
5657 we can fill in the MEM_ATTRs for this memory. */
5658 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5659 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5661 /* Creator of a heap trampoline is responsible for making sure the
5662 address is aligned to at least STACK_BOUNDARY. Normally malloc
5663 will ensure this anyhow. */
5664 tmp = round_trampoline_addr (r_tramp);
5665 if (tmp != r_tramp)
5667 m_tramp = change_address (m_tramp, BLKmode, tmp);
5668 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5669 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5672 /* The FUNC argument should be the address of the nested function.
5673 Extract the actual function decl to pass to the hook. */
5674 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5675 t_func = TREE_OPERAND (t_func, 0);
5676 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5678 r_chain = expand_normal (t_chain);
5680 /* Generate insns to initialize the trampoline. */
5681 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5683 if (onstack)
5685 trampolines_created = 1;
5687 if (targetm.calls.custom_function_descriptors != 0)
5688 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5689 "trampoline generated for nested function %qD", t_func);
5692 return const0_rtx;
5695 static rtx
5696 expand_builtin_adjust_trampoline (tree exp)
5698 rtx tramp;
5700 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5701 return NULL_RTX;
5703 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5704 tramp = round_trampoline_addr (tramp);
5705 if (targetm.calls.trampoline_adjust_address)
5706 tramp = targetm.calls.trampoline_adjust_address (tramp);
5708 return tramp;
5711 /* Expand a call to the builtin descriptor initialization routine.
5712 A descriptor is made up of a couple of pointers to the static
5713 chain and the code entry in this order. */
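/* That is, the code below stores the static chain at offset 0 and the code
   address at offset POINTER_SIZE / BITS_PER_UNIT.  The companion adjustment
   routine further down then offsets the descriptor address by
   targetm.calls.custom_function_descriptors, so that at run time a
   descriptor can be distinguished from an ordinary, aligned function
   pointer.  */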
5715 static rtx
5716 expand_builtin_init_descriptor (tree exp)
5718 tree t_descr, t_func, t_chain;
5719 rtx m_descr, r_descr, r_func, r_chain;
5721 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5722 VOID_TYPE))
5723 return NULL_RTX;
5725 t_descr = CALL_EXPR_ARG (exp, 0);
5726 t_func = CALL_EXPR_ARG (exp, 1);
5727 t_chain = CALL_EXPR_ARG (exp, 2);
5729 r_descr = expand_normal (t_descr);
5730 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5731 MEM_NOTRAP_P (m_descr) = 1;
5732 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5734 r_func = expand_normal (t_func);
5735 r_chain = expand_normal (t_chain);
5737 /* Generate insns to initialize the descriptor. */
5738 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5739 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5740 POINTER_SIZE / BITS_PER_UNIT), r_func);
5742 return const0_rtx;
5745 /* Expand a call to the builtin descriptor adjustment routine. */
5747 static rtx
5748 expand_builtin_adjust_descriptor (tree exp)
5750 rtx tramp;
5752 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5753 return NULL_RTX;
5755 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5757 /* Unalign the descriptor to allow runtime identification. */
5758 tramp = plus_constant (ptr_mode, tramp,
5759 targetm.calls.custom_function_descriptors);
5761 return force_operand (tramp, NULL_RTX);
5764 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5765 function. The function first checks whether the back end provides
5766 an insn to implement signbit for the respective mode. If not, it
5767 checks whether the floating point format of the value is such that
5768 the sign bit can be extracted. If that is not the case, error out.
5769 EXP is the expression that is a call to the builtin function; if
5770 convenient, the result should be placed in TARGET. */
5771 static rtx
5772 expand_builtin_signbit (tree exp, rtx target)
5774 const struct real_format *fmt;
5775 scalar_float_mode fmode;
5776 scalar_int_mode rmode, imode;
5777 tree arg;
5778 int word, bitpos;
5779 enum insn_code icode;
5780 rtx temp;
5781 location_t loc = EXPR_LOCATION (exp);
5783 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5784 return NULL_RTX;
5786 arg = CALL_EXPR_ARG (exp, 0);
5787 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5788 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5789 fmt = REAL_MODE_FORMAT (fmode);
5791 arg = builtin_save_expr (arg);
5793 /* Expand the argument yielding a RTX expression. */
5794 temp = expand_normal (arg);
5796 /* Check if the back end provides an insn that handles signbit for the
5797 argument's mode. */
5798 icode = optab_handler (signbit_optab, fmode);
5799 if (icode != CODE_FOR_nothing)
5801 rtx_insn *last = get_last_insn ();
5802 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5803 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
5804 return this_target;
5805 delete_insns_since (last);
5808 /* For floating point formats without a sign bit, implement signbit
5809 as "ARG < 0.0". */
5810 bitpos = fmt->signbit_ro;
5811 if (bitpos < 0)
5813 /* But we can't do this if the format supports signed zero. */
5814 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5816 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5817 build_real (TREE_TYPE (arg), dconst0));
5818 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5821 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5823 imode = int_mode_for_mode (fmode).require ();
5824 temp = gen_lowpart (imode, temp);
5826 else
5828 imode = word_mode;
5829 /* Handle targets with different FP word orders. */
5830 if (FLOAT_WORDS_BIG_ENDIAN)
5831 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5832 else
5833 word = bitpos / BITS_PER_WORD;
5834 temp = operand_subword_force (temp, word, fmode);
5835 bitpos = bitpos % BITS_PER_WORD;
5838 /* Force the intermediate word_mode (or narrower) result into a
5839 register. This avoids attempting to create paradoxical SUBREGs
5840 of floating point modes below. */
5841 temp = force_reg (imode, temp);
5843 /* If the bitpos is within the "result mode" lowpart, the operation
5844 can be implemented with a single bitwise AND.  Otherwise, we need
5845 a right shift and an AND. */
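/* For example, with a 32-bit result mode: for SFmode the sign is bit 31,
   so a single AND with 0x80000000 suffices; for DFmode on a 64-bit target
   the sign is bit 63 of the DImode image, so it is shifted down first and
   then masked with 1.  */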
5847 if (bitpos < GET_MODE_BITSIZE (rmode))
5849 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5851 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5852 temp = gen_lowpart (rmode, temp);
5853 temp = expand_binop (rmode, and_optab, temp,
5854 immed_wide_int_const (mask, rmode),
5855 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5857 else
5859 /* Perform a logical right shift to place the signbit in the least
5860 significant bit, then truncate the result to the desired mode
5861 and mask just this bit. */
5862 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5863 temp = gen_lowpart (rmode, temp);
5864 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5865 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5868 return temp;
5871 /* Expand fork or exec calls. TARGET is the desired target of the
5872 call.  EXP is the call.  FN is the declaration of
5873 the actual function being called.  IGNORE is nonzero if the
5874 value is to be ignored. */
5876 static rtx
5877 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5879 tree id, decl;
5880 tree call;
5882 /* If we are not profiling, just call the function. */
5883 if (!profile_arc_flag)
5884 return NULL_RTX;
5886 /* Otherwise call the wrapper. This should be equivalent for the rest of
5887 compiler, so the code does not diverge, and the wrapper may run the
5888 code necessary for keeping the profiling sane. */
5890 switch (DECL_FUNCTION_CODE (fn))
5892 case BUILT_IN_FORK:
5893 id = get_identifier ("__gcov_fork");
5894 break;
5896 case BUILT_IN_EXECL:
5897 id = get_identifier ("__gcov_execl");
5898 break;
5900 case BUILT_IN_EXECV:
5901 id = get_identifier ("__gcov_execv");
5902 break;
5904 case BUILT_IN_EXECLP:
5905 id = get_identifier ("__gcov_execlp");
5906 break;
5908 case BUILT_IN_EXECLE:
5909 id = get_identifier ("__gcov_execle");
5910 break;
5912 case BUILT_IN_EXECVP:
5913 id = get_identifier ("__gcov_execvp");
5914 break;
5916 case BUILT_IN_EXECVE:
5917 id = get_identifier ("__gcov_execve");
5918 break;
5920 default:
5921 gcc_unreachable ();
5924 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5925 FUNCTION_DECL, id, TREE_TYPE (fn));
5926 DECL_EXTERNAL (decl) = 1;
5927 TREE_PUBLIC (decl) = 1;
5928 DECL_ARTIFICIAL (decl) = 1;
5929 TREE_NOTHROW (decl) = 1;
5930 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5931 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5932 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5933 return expand_call (call, target, ignore);
5938 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5939 the pointer in these functions is void*, the tree optimizers may remove
5940 casts. The mode computed in expand_builtin isn't reliable either, due
5941 to __sync_bool_compare_and_swap.
5943 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5944 group of builtins. This gives us log2 of the mode size. */
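/* E.g. for BUILT_IN_SYNC_FETCH_AND_ADD_4, FCODE_DIFF relative to the _1
   variant is 2, giving 8 << 2 = 32 bits, i.e. SImode on typical targets.  */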
5946 static inline machine_mode
5947 get_builtin_sync_mode (int fcode_diff)
5949 /* The size is not negotiable, so ask not to get BLKmode in return
5950 if the target indicates that a smaller size would be better. */
5951 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5954 /* Expand the memory expression LOC and return the appropriate memory operand
5955 for the builtin_sync operations. */
5957 static rtx
5958 get_builtin_sync_mem (tree loc, machine_mode mode)
5960 rtx addr, mem;
5961 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5962 ? TREE_TYPE (TREE_TYPE (loc))
5963 : TREE_TYPE (loc));
5964 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5966 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5967 addr = convert_memory_address (addr_mode, addr);
5969 /* Note that we explicitly do not want any alias information for this
5970 memory, so that we kill all other live memories. Otherwise we don't
5971 satisfy the full barrier semantics of the intrinsic. */
5972 mem = gen_rtx_MEM (mode, addr);
5974 set_mem_addr_space (mem, addr_space);
5976 mem = validize_mem (mem);
5978 /* The alignment needs to be at least according to that of the mode. */
5979 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5980 get_pointer_alignment (loc)));
5981 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5982 MEM_VOLATILE_P (mem) = 1;
5984 return mem;
5987 /* Make sure an argument is in the right mode.
5988 EXP is the tree argument.
5989 MODE is the mode it should be in. */
5991 static rtx
5992 expand_expr_force_mode (tree exp, machine_mode mode)
5994 rtx val;
5995 machine_mode old_mode;
5997 if (TREE_CODE (exp) == SSA_NAME
5998 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6000 /* Undo argument promotion if possible, as combine might not
6001 be able to do it later due to MEM_VOLATILE_P uses in the
6002 patterns. */
6003 gimple *g = get_gimple_for_ssa_name (exp);
6004 if (g && gimple_assign_cast_p (g))
6006 tree rhs = gimple_assign_rhs1 (g);
6007 tree_code code = gimple_assign_rhs_code (g);
6008 if (CONVERT_EXPR_CODE_P (code)
6009 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6010 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6011 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6012 && (TYPE_PRECISION (TREE_TYPE (exp))
6013 > TYPE_PRECISION (TREE_TYPE (rhs))))
6014 exp = rhs;
6018 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6019 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6020 of CONST_INTs, where we know the old_mode only from the call argument. */
6022 old_mode = GET_MODE (val);
6023 if (old_mode == VOIDmode)
6024 old_mode = TYPE_MODE (TREE_TYPE (exp));
6025 val = convert_modes (mode, old_mode, val, 1);
6026 return val;
6030 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6031 EXP is the CALL_EXPR. CODE is the rtx code
6032 that corresponds to the arithmetic or logical operation from the name;
6033 an exception here is that NOT actually means NAND. TARGET is an optional
6034 place for us to store the results; AFTER is true if this is the
6035 fetch_and_xxx form. */
6037 static rtx
6038 expand_builtin_sync_operation (machine_mode mode, tree exp,
6039 enum rtx_code code, bool after,
6040 rtx target)
6042 rtx val, mem;
6043 location_t loc = EXPR_LOCATION (exp);
6045 if (code == NOT && warn_sync_nand)
6047 tree fndecl = get_callee_fndecl (exp);
6048 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6050 static bool warned_f_a_n, warned_n_a_f;
6052 switch (fcode)
6054 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6055 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6056 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6057 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6058 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6059 if (warned_f_a_n)
6060 break;
6062 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6063 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6064 warned_f_a_n = true;
6065 break;
6067 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6068 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6069 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6070 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6071 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6072 if (warned_n_a_f)
6073 break;
6075 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6076 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6077 warned_n_a_f = true;
6078 break;
6080 default:
6081 gcc_unreachable ();
6085 /* Expand the operands. */
6086 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6087 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6089 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6090 after);
6093 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6094 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6095 true if this is the boolean form. TARGET is a place for us to store the
6096 results; this is NOT optional if IS_BOOL is true. */
6098 static rtx
6099 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6100 bool is_bool, rtx target)
6102 rtx old_val, new_val, mem;
6103 rtx *pbool, *poval;
6105 /* Expand the operands. */
6106 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6107 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6108 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6110 pbool = poval = NULL;
6111 if (target != const0_rtx)
6113 if (is_bool)
6114 pbool = &target;
6115 else
6116 poval = &target;
6118 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6119 false, MEMMODEL_SYNC_SEQ_CST,
6120 MEMMODEL_SYNC_SEQ_CST))
6121 return NULL_RTX;
6123 return target;
6126 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6127 general form is actually an atomic exchange, and some targets only
6128 support a reduced form with the second argument being a constant 1.
6129 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6130 the results. */
6132 static rtx
6133 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6134 rtx target)
6136 rtx val, mem;
6138 /* Expand the operands. */
6139 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6140 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6142 return expand_sync_lock_test_and_set (target, mem, val);
6145 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6147 static void
6148 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6150 rtx mem;
6152 /* Expand the operands. */
6153 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6155 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6158 /* Given an integer representing an ``enum memmodel'', verify its
6159 correctness and return the memory model enum. */
6161 static enum memmodel
6162 get_memmodel (tree exp)
6164 /* If the parameter is not a constant, it's a run time value so we'll just
6165 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6166 if (TREE_CODE (exp) != INTEGER_CST)
6167 return MEMMODEL_SEQ_CST;
6169 rtx op = expand_normal (exp);
6171 unsigned HOST_WIDE_INT val = INTVAL (op);
6172 if (targetm.memmodel_check)
6173 val = targetm.memmodel_check (val);
6174 else if (val & ~MEMMODEL_MASK)
6175 return MEMMODEL_SEQ_CST;
6177 /* Should never see an explicit user SYNC memmodel, so >= LAST works. */
6178 if (memmodel_base (val) >= MEMMODEL_LAST)
6179 return MEMMODEL_SEQ_CST;
6181 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6182 be conservative and promote consume to acquire. */
6183 if (val == MEMMODEL_CONSUME)
6184 val = MEMMODEL_ACQUIRE;
6186 return (enum memmodel) val;
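/* Thus a constant __ATOMIC_RELAXED (0) comes back as MEMMODEL_RELAXED, a
   non-constant or out-of-range model argument is conservatively treated as
   MEMMODEL_SEQ_CST, and __ATOMIC_CONSUME is promoted to MEMMODEL_ACQUIRE.  */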
6189 /* Expand the __atomic_exchange intrinsic:
6190 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6191 EXP is the CALL_EXPR.
6192 TARGET is an optional place for us to store the results. */
6194 static rtx
6195 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6197 rtx val, mem;
6198 enum memmodel model;
6200 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6202 if (!flag_inline_atomics)
6203 return NULL_RTX;
6205 /* Expand the operands. */
6206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6207 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6209 return expand_atomic_exchange (target, mem, val, model);
6212 /* Expand the __atomic_compare_exchange intrinsic:
6213 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6214 TYPE desired, BOOL weak,
6215 enum memmodel success,
6216 enum memmodel failure)
6217 EXP is the CALL_EXPR.
6218 TARGET is an optional place for us to store the results. */
6220 static rtx
6221 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6222 rtx target)
6224 rtx expect, desired, mem, oldval;
6225 rtx_code_label *label;
6226 tree weak;
6227 bool is_weak;
6229 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6230 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6232 if (failure > success)
6233 success = MEMMODEL_SEQ_CST;
6235 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6237 failure = MEMMODEL_SEQ_CST;
6238 success = MEMMODEL_SEQ_CST;
6242 if (!flag_inline_atomics)
6243 return NULL_RTX;
6245 /* Expand the operands. */
6246 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6248 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6249 expect = convert_memory_address (Pmode, expect);
6250 expect = gen_rtx_MEM (mode, expect);
6251 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6253 weak = CALL_EXPR_ARG (exp, 3);
6254 is_weak = false;
6255 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6256 is_weak = true;
6258 if (target == const0_rtx)
6259 target = NULL;
6261 /* Lest the rtl backend create a race condition with an improper store
6262 to memory, always create a new pseudo for OLDVAL. */
6263 oldval = NULL;
6265 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6266 is_weak, success, failure))
6267 return NULL_RTX;
6269 /* Conditionally store back to EXPECT, lest we create a race condition
6270 with an improper store to memory. */
6271 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6272 the normal case where EXPECT is totally private, i.e. a register. At
6273 which point the store can be unconditional. */
6274 label = gen_label_rtx ();
6275 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6276 GET_MODE (target), 1, label);
6277 emit_move_insn (expect, oldval);
6278 emit_label (label);
6280 return target;
6283 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6284 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6285 call. The weak parameter must be dropped to match the expected parameter
6286 list and the expected argument changed from value to pointer to memory
6287 slot. */
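/* In other words, an internal call along the lines of

     .ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, flags, success, failure)

   is rewritten into roughly

     __atomic_compare_exchange_N (ptr, &tmp, desired, success, failure)

   where TMP is a stack slot preloaded with EXPECTED, FLAGS packs the access
   size and the weak bit, and N is the access size in bytes.  */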
6289 static void
6290 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6292 unsigned int z;
6293 vec<tree, va_gc> *vec;
6295 vec_alloc (vec, 5);
6296 vec->quick_push (gimple_call_arg (call, 0));
6297 tree expected = gimple_call_arg (call, 1);
6298 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6299 TREE_TYPE (expected));
6300 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6301 if (expd != x)
6302 emit_move_insn (x, expd);
6303 tree v = make_tree (TREE_TYPE (expected), x);
6304 vec->quick_push (build1 (ADDR_EXPR,
6305 build_pointer_type (TREE_TYPE (expected)), v));
6306 vec->quick_push (gimple_call_arg (call, 2));
6307 /* Skip the boolean weak parameter. */
6308 for (z = 4; z < 6; z++)
6309 vec->quick_push (gimple_call_arg (call, z));
6310 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6311 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6312 gcc_assert (bytes_log2 < 5);
6313 built_in_function fncode
6314 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6315 + bytes_log2);
6316 tree fndecl = builtin_decl_explicit (fncode);
6317 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6318 fndecl);
6319 tree exp = build_call_vec (boolean_type_node, fn, vec);
6320 tree lhs = gimple_call_lhs (call);
6321 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6322 if (lhs)
6324 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6325 if (GET_MODE (boolret) != mode)
6326 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6327 x = force_reg (mode, x);
6328 write_complex_part (target, boolret, true, true);
6329 write_complex_part (target, x, false, false);
6333 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6335 void
6336 expand_ifn_atomic_compare_exchange (gcall *call)
6338 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6339 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6340 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6342 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6343 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6345 if (failure > success)
6346 success = MEMMODEL_SEQ_CST;
6348 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6350 failure = MEMMODEL_SEQ_CST;
6351 success = MEMMODEL_SEQ_CST;
6354 if (!flag_inline_atomics)
6356 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6357 return;
6360 /* Expand the operands. */
6361 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6363 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6364 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6366 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6368 rtx boolret = NULL;
6369 rtx oldval = NULL;
6371 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6372 is_weak, success, failure))
6374 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6375 return;
6378 tree lhs = gimple_call_lhs (call);
6379 if (lhs)
6381 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6382 if (GET_MODE (boolret) != mode)
6383 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6384 write_complex_part (target, boolret, true, true);
6385 write_complex_part (target, oldval, false, false);
6389 /* Expand the __atomic_load intrinsic:
6390 TYPE __atomic_load (TYPE *object, enum memmodel)
6391 EXP is the CALL_EXPR.
6392 TARGET is an optional place for us to store the results. */
6394 static rtx
6395 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6397 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6398 if (is_mm_release (model) || is_mm_acq_rel (model))
6399 model = MEMMODEL_SEQ_CST;
6401 if (!flag_inline_atomics)
6402 return NULL_RTX;
6404 /* Expand the operand. */
6405 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6407 return expand_atomic_load (target, mem, model);
6411 /* Expand the __atomic_store intrinsic:
6412 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6413 EXP is the CALL_EXPR.
6414 TARGET is an optional place for us to store the results. */
6416 static rtx
6417 expand_builtin_atomic_store (machine_mode mode, tree exp)
6419 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6420 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6421 || is_mm_release (model)))
6422 model = MEMMODEL_SEQ_CST;
6424 if (!flag_inline_atomics)
6425 return NULL_RTX;
6427 /* Expand the operands. */
6428 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6429 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6431 return expand_atomic_store (mem, val, model, false);
6434 /* Expand the __atomic_fetch_XXX intrinsic:
6435 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6436 EXP is the CALL_EXPR.
6437 TARGET is an optional place for us to store the results.
6438 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6439 FETCH_AFTER is true if returning the result of the operation.
6440 FETCH_AFTER is false if returning the value before the operation.
6441 IGNORE is true if the result is not used.
6442 EXT_CALL is the correct builtin for an external call if this cannot be
6443 resolved to an instruction sequence. */
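/* For example, when __atomic_add_fetch_4 cannot be expanded inline, EXT_CALL
   names __atomic_fetch_add_4; that call returns the pre-operation value, so
   the code below re-applies the addition (or, for NOT/NAND, the AND plus
   complement) to reconstruct the post-operation result.  */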
6445 static rtx
6446 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6447 enum rtx_code code, bool fetch_after,
6448 bool ignore, enum built_in_function ext_call)
6450 rtx val, mem, ret;
6451 enum memmodel model;
6452 tree fndecl;
6453 tree addr;
6455 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6457 /* Expand the operands. */
6458 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6459 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6461 /* Only try generating instructions if inlining is turned on. */
6462 if (flag_inline_atomics)
6464 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6465 if (ret)
6466 return ret;
6469 /* Return if a different routine isn't needed for the library call. */
6470 if (ext_call == BUILT_IN_NONE)
6471 return NULL_RTX;
6473 /* Change the call to the specified function. */
6474 fndecl = get_callee_fndecl (exp);
6475 addr = CALL_EXPR_FN (exp);
6476 STRIP_NOPS (addr);
6478 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6479 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6481 /* If we will emit code after the call, the call cannot be a tail call.
6482 If it is emitted as a tail call, a barrier is emitted after it, and
6483 then all trailing code is removed. */
6484 if (!ignore)
6485 CALL_EXPR_TAILCALL (exp) = 0;
6487 /* Expand the call here so we can emit trailing code. */
6488 ret = expand_call (exp, target, ignore);
6490 /* Replace the original function just in case it matters. */
6491 TREE_OPERAND (addr, 0) = fndecl;
6493 /* Then issue the arithmetic correction to return the right result. */
6494 if (!ignore)
6496 if (code == NOT)
6498 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6499 OPTAB_LIB_WIDEN);
6500 ret = expand_simple_unop (mode, NOT, ret, target, true);
6502 else
6503 ret = expand_simple_binop (mode, code, ret, val, target, true,
6504 OPTAB_LIB_WIDEN);
6506 return ret;
6509 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
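/* These internal functions are typically produced by earlier gimple passes
   from single-bit patterns such as

     (__atomic_fetch_or (ptr, 1 << bit, model) >> bit) & 1

   FLAG selects whether the result is wanted shifted down to bit zero, as
   above, or left in place under the original mask.  */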
6511 void
6512 expand_ifn_atomic_bit_test_and (gcall *call)
6514 tree ptr = gimple_call_arg (call, 0);
6515 tree bit = gimple_call_arg (call, 1);
6516 tree flag = gimple_call_arg (call, 2);
6517 tree lhs = gimple_call_lhs (call);
6518 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6519 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6520 enum rtx_code code;
6521 optab optab;
6522 class expand_operand ops[5];
6524 gcc_assert (flag_inline_atomics);
6526 if (gimple_call_num_args (call) == 5)
6527 model = get_memmodel (gimple_call_arg (call, 3));
6529 rtx mem = get_builtin_sync_mem (ptr, mode);
6530 rtx val = expand_expr_force_mode (bit, mode);
6532 switch (gimple_call_internal_fn (call))
6534 case IFN_ATOMIC_BIT_TEST_AND_SET:
6535 code = IOR;
6536 optab = atomic_bit_test_and_set_optab;
6537 break;
6538 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6539 code = XOR;
6540 optab = atomic_bit_test_and_complement_optab;
6541 break;
6542 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6543 code = AND;
6544 optab = atomic_bit_test_and_reset_optab;
6545 break;
6546 default:
6547 gcc_unreachable ();
6550 if (lhs == NULL_TREE)
6552 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6553 val, NULL_RTX, true, OPTAB_DIRECT);
6554 if (code == AND)
6555 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6556 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
6557 return;
6560 rtx target;
6561 if (lhs)
6562 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6563 else
6564 target = gen_reg_rtx (mode);
6565 enum insn_code icode = direct_optab_handler (optab, mode);
6566 gcc_assert (icode != CODE_FOR_nothing);
6567 create_output_operand (&ops[0], target, mode);
6568 create_fixed_operand (&ops[1], mem);
6569 create_convert_operand_to (&ops[2], val, mode, true);
6570 create_integer_operand (&ops[3], model);
6571 create_integer_operand (&ops[4], integer_onep (flag));
6572 if (maybe_expand_insn (icode, 5, ops))
6573 return;
6575 rtx bitval = val;
6576 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6577 val, NULL_RTX, true, OPTAB_DIRECT);
6578 rtx maskval = val;
6579 if (code == AND)
6580 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6581 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6582 code, model, false);
6583 if (!result)
6585 bool is_atomic = gimple_call_num_args (call) == 5;
6586 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6587 tree fndecl = gimple_call_addr_fndecl (tcall);
6588 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6589 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
6590 make_tree (type, val),
6591 is_atomic
6592 ? gimple_call_arg (call, 3)
6593 : integer_zero_node);
6594 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6595 mode, !lhs);
6597 if (!lhs)
6598 return;
6599 if (integer_onep (flag))
6601 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6602 NULL_RTX, true, OPTAB_DIRECT);
6603 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6604 true, OPTAB_DIRECT);
6606 else
6607 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6608 OPTAB_DIRECT);
6609 if (result != target)
6610 emit_move_insn (target, result);
6613 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
6615 void
6616 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
6618 tree cmp = gimple_call_arg (call, 0);
6619 tree ptr = gimple_call_arg (call, 1);
6620 tree arg = gimple_call_arg (call, 2);
6621 tree lhs = gimple_call_lhs (call);
6622 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6623 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
6624 optab optab;
6625 rtx_code code;
6626 class expand_operand ops[5];
6628 gcc_assert (flag_inline_atomics);
6630 if (gimple_call_num_args (call) == 5)
6631 model = get_memmodel (gimple_call_arg (call, 3));
6633 rtx mem = get_builtin_sync_mem (ptr, mode);
6634 rtx op = expand_expr_force_mode (arg, mode);
6636 switch (gimple_call_internal_fn (call))
6638 case IFN_ATOMIC_ADD_FETCH_CMP_0:
6639 code = PLUS;
6640 optab = atomic_add_fetch_cmp_0_optab;
6641 break;
6642 case IFN_ATOMIC_SUB_FETCH_CMP_0:
6643 code = MINUS;
6644 optab = atomic_sub_fetch_cmp_0_optab;
6645 break;
6646 case IFN_ATOMIC_AND_FETCH_CMP_0:
6647 code = AND;
6648 optab = atomic_and_fetch_cmp_0_optab;
6649 break;
6650 case IFN_ATOMIC_OR_FETCH_CMP_0:
6651 code = IOR;
6652 optab = atomic_or_fetch_cmp_0_optab;
6653 break;
6654 case IFN_ATOMIC_XOR_FETCH_CMP_0:
6655 code = XOR;
6656 optab = atomic_xor_fetch_cmp_0_optab;
6657 break;
6658 default:
6659 gcc_unreachable ();
6662 enum rtx_code comp = UNKNOWN;
6663 switch (tree_to_uhwi (cmp))
6665 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
6666 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
6667 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
6668 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
6669 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
6670 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
6671 default: gcc_unreachable ();
6674 rtx target;
6675 if (lhs == NULL_TREE)
6676 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
6677 else
6678 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6679 enum insn_code icode = direct_optab_handler (optab, mode);
6680 gcc_assert (icode != CODE_FOR_nothing);
6681 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
6682 create_fixed_operand (&ops[1], mem);
6683 create_convert_operand_to (&ops[2], op, mode, true);
6684 create_integer_operand (&ops[3], model);
6685 create_integer_operand (&ops[4], comp);
6686 if (maybe_expand_insn (icode, 5, ops))
6687 return;
6689 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
6690 code, model, true);
6691 if (!result)
6693 bool is_atomic = gimple_call_num_args (call) == 5;
6694 tree tcall = gimple_call_arg (call, 3 + is_atomic);
6695 tree fndecl = gimple_call_addr_fndecl (tcall);
6696 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6697 tree exp = build_call_nary (type, tcall,
6698 2 + is_atomic, ptr, arg,
6699 is_atomic
6700 ? gimple_call_arg (call, 3)
6701 : integer_zero_node);
6702 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
6703 mode, !lhs);
6706 if (lhs)
6708 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
6709 0, 1);
6710 if (result != target)
6711 emit_move_insn (target, result);
6715 /* Expand an atomic clear operation.
6716 void __atomic_clear (BOOL *obj, enum memmodel)
6717 EXP is the call expression. */
6719 static rtx
6720 expand_builtin_atomic_clear (tree exp)
6722 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6723 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6724 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6726 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6727 model = MEMMODEL_SEQ_CST;
6729 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6730 Failing that, a plain store is issued below.  The only way this can
6731 fail is if the bool type is larger than a word size. Unlikely, but
6732 handle it anyway for completeness. Assume a single threaded model since
6733 there is no atomic support in this case, and no barriers are required. */
6734 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
6735 if (!ret)
6736 emit_move_insn (mem, const0_rtx);
6737 return const0_rtx;
6740 /* Expand an atomic test_and_set operation.
6741 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6742 EXP is the call expression. */
6744 static rtx
6745 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6747 rtx mem;
6748 enum memmodel model;
6749 machine_mode mode;
6751 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6752 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6753 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6755 return expand_atomic_test_and_set (target, mem, model);
6759 /* Return true if the (optional) object ARG1 of size ARG0 is always lock free on
6760 this architecture.  If ARG1 is NULL, use typical alignment for size ARG0. */
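/* For instance, __atomic_always_lock_free (sizeof (int), 0) folds to true
   when the target has a never-failing compare-and-swap pattern and an
   atomic load for the corresponding integer mode; a pointer argument whose
   type (or encoded alignment) is less aligned than that mode makes the
   answer false.  */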
6762 static tree
6763 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6765 int size;
6766 machine_mode mode;
6767 unsigned int mode_align, type_align;
6769 if (TREE_CODE (arg0) != INTEGER_CST)
6770 return NULL_TREE;
6772 /* We need a corresponding integer mode for the access to be lock-free. */
6773 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6774 if (!int_mode_for_size (size, 0).exists (&mode))
6775 return boolean_false_node;
6777 mode_align = GET_MODE_ALIGNMENT (mode);
6779 if (TREE_CODE (arg1) == INTEGER_CST)
6781 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6783 /* Either this argument is null, or it's a fake pointer encoding
6784 the alignment of the object. */
6785 val = least_bit_hwi (val);
6786 val *= BITS_PER_UNIT;
6788 if (val == 0 || mode_align < val)
6789 type_align = mode_align;
6790 else
6791 type_align = val;
6793 else
6795 tree ttype = TREE_TYPE (arg1);
6797 /* This function is usually invoked and folded immediately by the front
6798 end before anything else has a chance to look at it. The pointer
6799 parameter at this point is usually cast to a void *, so check for that
6800 and look past the cast. */
6801 if (CONVERT_EXPR_P (arg1)
6802 && POINTER_TYPE_P (ttype)
6803 && VOID_TYPE_P (TREE_TYPE (ttype))
6804 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6805 arg1 = TREE_OPERAND (arg1, 0);
6807 ttype = TREE_TYPE (arg1);
6808 gcc_assert (POINTER_TYPE_P (ttype));
6810 /* Get the underlying type of the object. */
6811 ttype = TREE_TYPE (ttype);
6812 type_align = TYPE_ALIGN (ttype);
6815 /* If the object has smaller alignment, the lock free routines cannot
6816 be used. */
6817 if (type_align < mode_align)
6818 return boolean_false_node;
6820 /* Check if a compare_and_swap pattern exists for the mode which represents
6821 the required size. The pattern is not allowed to fail, so the existence
6822 of the pattern indicates support is present. Also require that an
6823 atomic load exists for the required size. */
6824 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6825 return boolean_true_node;
6826 else
6827 return boolean_false_node;
6830 /* Return true if the parameters to call EXP represent an object which will
6831 always generate lock free instructions. The first argument represents the
6832 size of the object, and the second parameter is a pointer to the object
6833 itself. If NULL is passed for the object, then the result is based on
6834 typical alignment for an object of the specified size. Otherwise return
6835 false. */
6837 static rtx
6838 expand_builtin_atomic_always_lock_free (tree exp)
6840 tree size;
6841 tree arg0 = CALL_EXPR_ARG (exp, 0);
6842 tree arg1 = CALL_EXPR_ARG (exp, 1);
6844 if (TREE_CODE (arg0) != INTEGER_CST)
6846 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6847 return const0_rtx;
6850 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6851 if (size == boolean_true_node)
6852 return const1_rtx;
6853 return const0_rtx;
6856 /* Return boolean_true_node if it can be determined that object ARG1 of size ARG0
6857 is lock free on this architecture; otherwise return NULL_TREE. */
6859 static tree
6860 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6862 if (!flag_inline_atomics)
6863 return NULL_TREE;
6865 /* If it isn't always lock free, don't generate a result. */
6866 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6867 return boolean_true_node;
6869 return NULL_TREE;
6872 /* Return true if the parameters to call EXP represent an object which will
6873 always generate lock free instructions. The first argument represents the
6874 size of the object, and the second parameter is a pointer to the object
6875 itself. If NULL is passed for the object, then the result is based on
6876 typical alignment for an object of the specified size. Otherwise return
6877 NULL.  */
6879 static rtx
6880 expand_builtin_atomic_is_lock_free (tree exp)
6882 tree size;
6883 tree arg0 = CALL_EXPR_ARG (exp, 0);
6884 tree arg1 = CALL_EXPR_ARG (exp, 1);
6886 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6888 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6889 return NULL_RTX;
6892 if (!flag_inline_atomics)
6893 return NULL_RTX;
6895 /* If the value is known at compile time, return the RTX for it. */
6896 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6897 if (size == boolean_true_node)
6898 return const1_rtx;
6900 return NULL_RTX;
6903 /* Expand the __atomic_thread_fence intrinsic:
6904 void __atomic_thread_fence (enum memmodel)
6905 EXP is the CALL_EXPR. */
6907 static void
6908 expand_builtin_atomic_thread_fence (tree exp)
6910 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6911 expand_mem_thread_fence (model);
6914 /* Expand the __atomic_signal_fence intrinsic:
6915 void __atomic_signal_fence (enum memmodel)
6916 EXP is the CALL_EXPR. */
6918 static void
6919 expand_builtin_atomic_signal_fence (tree exp)
6921 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6922 expand_mem_signal_fence (model);
6925 /* Expand the __sync_synchronize intrinsic. */
6927 static void
6928 expand_builtin_sync_synchronize (void)
6930 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6933 static rtx
6934 expand_builtin_thread_pointer (tree exp, rtx target)
6936 enum insn_code icode;
6937 if (!validate_arglist (exp, VOID_TYPE))
6938 return const0_rtx;
6939 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6940 if (icode != CODE_FOR_nothing)
6942 class expand_operand op;
6943 /* If the target is not suitable then create a new target. */
6944 if (target == NULL_RTX
6945 || !REG_P (target)
6946 || GET_MODE (target) != Pmode)
6947 target = gen_reg_rtx (Pmode);
6948 create_output_operand (&op, target, Pmode);
6949 expand_insn (icode, 1, &op);
6950 return target;
6952 error ("%<__builtin_thread_pointer%> is not supported on this target");
6953 return const0_rtx;
6956 static void
6957 expand_builtin_set_thread_pointer (tree exp)
6959 enum insn_code icode;
6960 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6961 return;
6962 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6963 if (icode != CODE_FOR_nothing)
6965 class expand_operand op;
6966 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6967 Pmode, EXPAND_NORMAL);
6968 create_input_operand (&op, val, Pmode);
6969 expand_insn (icode, 1, &op);
6970 return;
6972 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6976 /* Emit code to restore the current value of the stack. */
6978 static void
6979 expand_stack_restore (tree var)
6981 rtx_insn *prev;
6982 rtx sa = expand_normal (var);
6984 sa = convert_memory_address (Pmode, sa);
6986 prev = get_last_insn ();
6987 emit_stack_restore (SAVE_BLOCK, sa);
6989 record_new_stack_level ();
6991 fixup_args_size_notes (prev, get_last_insn (), 0);
6994 /* Emit code to save the current value of the stack. */
6996 static rtx
6997 expand_stack_save (void)
6999 rtx ret = NULL_RTX;
7001 emit_stack_save (SAVE_BLOCK, &ret);
7002 return ret;
7005 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7007 static rtx
7008 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7010 const char *name;
7011 rtx fallback_retval;
7012 rtx_insn *(*gen_fn) (rtx, rtx);
7013 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7015 case BUILT_IN_GOACC_PARLEVEL_ID:
7016 name = "__builtin_goacc_parlevel_id";
7017 fallback_retval = const0_rtx;
7018 gen_fn = targetm.gen_oacc_dim_pos;
7019 break;
7020 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7021 name = "__builtin_goacc_parlevel_size";
7022 fallback_retval = const1_rtx;
7023 gen_fn = targetm.gen_oacc_dim_size;
7024 break;
7025 default:
7026 gcc_unreachable ();
7029 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7031 error ("%qs only supported in OpenACC code", name);
7032 return const0_rtx;
7035 tree arg = CALL_EXPR_ARG (exp, 0);
7036 if (TREE_CODE (arg) != INTEGER_CST)
7038 error ("non-constant argument 0 to %qs", name);
7039 return const0_rtx;
7042 int dim = TREE_INT_CST_LOW (arg);
7043 switch (dim)
7045 case GOMP_DIM_GANG:
7046 case GOMP_DIM_WORKER:
7047 case GOMP_DIM_VECTOR:
7048 break;
7049 default:
7050 error ("illegal argument 0 to %qs", name);
7051 return const0_rtx;
7054 if (ignore)
7055 return target;
7057 if (target == NULL_RTX)
7058 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7060 if (!targetm.have_oacc_dim_size ())
7062 emit_move_insn (target, fallback_retval);
7063 return target;
7066 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7067 emit_insn (gen_fn (reg, GEN_INT (dim)));
7068 if (reg != target)
7069 emit_move_insn (target, reg);
7071 return target;
7074 /* Expand a string compare operation using a sequence of char comparisons
7075 to get rid of the calling overhead, with result going to TARGET if
7076 that's convenient.
7078 VAR_STR is the variable string source;
7079 CONST_STR is the constant string source;
7080 LENGTH is the number of chars to compare;
7081 CONST_STR_N indicates which source string is the constant string;
7082 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7084 The comparison is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7086 target = (int) (unsigned char) var_str[0]
7087 - (int) (unsigned char) const_str[0];
7088 if (target != 0)
7089 goto ne_label;
7091 target = (int) (unsigned char) var_str[length - 2]
7092 - (int) (unsigned char) const_str[length - 2];
7093 if (target != 0)
7094 goto ne_label;
7095 target = (int) (unsigned char) var_str[length - 1]
7096 - (int) (unsigned char) const_str[length - 1];
7097 ne_label:
7100 static rtx
7101 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7102 unsigned HOST_WIDE_INT length,
7103 int const_str_n, machine_mode mode)
7105 HOST_WIDE_INT offset = 0;
7106 rtx var_rtx_array
7107 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7108 rtx var_rtx = NULL_RTX;
7109 rtx const_rtx = NULL_RTX;
7110 rtx result = target ? target : gen_reg_rtx (mode);
7111 rtx_code_label *ne_label = gen_label_rtx ();
7112 tree unit_type_node = unsigned_char_type_node;
7113 scalar_int_mode unit_mode
7114 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7116 start_sequence ();
7118 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7120 var_rtx
7121 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7122 const_rtx = c_readstr (const_str + offset, unit_mode);
7123 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7124 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7126 op0 = convert_modes (mode, unit_mode, op0, 1);
7127 op1 = convert_modes (mode, unit_mode, op1, 1);
7128 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7129 result, 1, OPTAB_WIDEN);
7131 /* Force the difference into the result register. We cannot reassign
7132 result here ("result = diff") or we may end up returning an
7133 uninitialized result when expand_simple_binop allocates a new
7134 pseudo-register for returning. */
7135 if (diff != result)
7136 emit_move_insn (result, diff);
7138 if (i < length - 1)
7139 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7140 mode, true, ne_label);
7141 offset += GET_MODE_SIZE (unit_mode);
7144 emit_label (ne_label);
7145 rtx_insn *insns = get_insns ();
7146 end_sequence ();
7147 emit_insn (insns);
7149 return result;
7152 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7153 to TARGET if that's convenient.
7154 If the call has not been inlined, return NULL_RTX. */
7156 static rtx
7157 inline_expand_builtin_bytecmp (tree exp, rtx target)
7159 tree fndecl = get_callee_fndecl (exp);
7160 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7161 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7163 /* Do NOT apply this inlining expansion when optimizing for size, when the
7164 optimization level is below 2, or if the unused *cmp call hasn't been DCEd. */
7165 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7166 return NULL_RTX;
7168 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7169 || fcode == BUILT_IN_STRNCMP
7170 || fcode == BUILT_IN_MEMCMP);
7172 /* On a target where the type of the call (int) has the same or narrower precision
7173 than unsigned char, give up the inlining expansion. */
7174 if (TYPE_PRECISION (unsigned_char_type_node)
7175 >= TYPE_PRECISION (TREE_TYPE (exp)))
7176 return NULL_RTX;
7178 tree arg1 = CALL_EXPR_ARG (exp, 0);
7179 tree arg2 = CALL_EXPR_ARG (exp, 1);
7180 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7182 unsigned HOST_WIDE_INT len1 = 0;
7183 unsigned HOST_WIDE_INT len2 = 0;
7184 unsigned HOST_WIDE_INT len3 = 0;
7186 /* Get the object representation of the initializers of ARG1 and ARG2
7187 as strings, provided they refer to constant objects, with their byte
7188 sizes in LEN1 and LEN2, respectively. */
7189 const char *bytes1 = getbyterep (arg1, &len1);
7190 const char *bytes2 = getbyterep (arg2, &len2);
7192 /* Fail if neither argument refers to an initialized constant. */
7193 if (!bytes1 && !bytes2)
7194 return NULL_RTX;
7196 if (is_ncmp)
7198 /* Fail if the memcmp/strncmp bound is not a constant. */
7199 if (!tree_fits_uhwi_p (len3_tree))
7200 return NULL_RTX;
7202 len3 = tree_to_uhwi (len3_tree);
7204 if (fcode == BUILT_IN_MEMCMP)
7206 /* Fail if the memcmp bound is greater than the size of either
7207 of the two constant objects. */
7208 if ((bytes1 && len1 < len3)
7209 || (bytes2 && len2 < len3))
7210 return NULL_RTX;
7214 if (fcode != BUILT_IN_MEMCMP)
7216 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7217 and LEN2 to the length of the nul-terminated string stored
7218 in each. */
7219 if (bytes1 != NULL)
7220 len1 = strnlen (bytes1, len1) + 1;
7221 if (bytes2 != NULL)
7222 len2 = strnlen (bytes2, len2) + 1;
7225 /* See inline_string_cmp. */
7226 int const_str_n;
7227 if (!len1)
7228 const_str_n = 2;
7229 else if (!len2)
7230 const_str_n = 1;
7231 else if (len2 > len1)
7232 const_str_n = 1;
7233 else
7234 const_str_n = 2;
7236 /* For strncmp only, compute the new bound as the smallest of
7237 the lengths of the two strings (plus 1) and the bound provided
7238 to the function. */
7239 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7240 if (is_ncmp && len3 < bound)
7241 bound = len3;
7243 /* If the bound of the comparison is larger than the threshold,
7244 do nothing. */
7245 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7246 return NULL_RTX;
7248 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7250 /* Now, start inline expansion of the call. */
7251 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7252 (const_str_n == 1) ? bytes1 : bytes2, bound,
7253 const_str_n, mode);
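/* Editor's note: an illustrative caller (not part of this file) of the kind
   of call the inline expansion above targets.  At -O2, a used comparison
   against a short constant string such as

     #include <string.h>

     int
     is_yes (const char *s)
     {
       return strcmp (s, "yes") == 0;
     }

   can be turned by inline_string_cmp into the per-character subtract-and-
   branch sequence sketched in its comment, provided the resulting bound does
   not exceed param_builtin_string_cmp_inline_length.  */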
7256 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7257 represents the size of the first argument to that call, or VOIDmode
7258 if the argument is a pointer. IGNORE will be true if the result
7259 isn't used. */
7260 static rtx
7261 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7262 bool ignore)
7264 rtx val, failsafe;
7265 unsigned nargs = call_expr_nargs (exp);
7267 tree arg0 = CALL_EXPR_ARG (exp, 0);
7269 if (mode == VOIDmode)
7271 mode = TYPE_MODE (TREE_TYPE (arg0));
7272 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7275 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7277 /* An optional second argument can be used as a failsafe value on
7278 some machines. If it isn't present, then the failsafe value is
7279 assumed to be 0. */
7280 if (nargs > 1)
7282 tree arg1 = CALL_EXPR_ARG (exp, 1);
7283 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7285 else
7286 failsafe = const0_rtx;
7288 /* If the result isn't used, the behavior is undefined. It would be
7289 nice to emit a warning here, but path splitting means this might
7290 happen with legitimate code. So simply drop the builtin
7291 expansion in that case; we've handled any side-effects above. */
7292 if (ignore)
7293 return const0_rtx;
7295 /* If we don't have a suitable target, create one to hold the result. */
7296 if (target == NULL || GET_MODE (target) != mode)
7297 target = gen_reg_rtx (mode);
7299 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7300 val = convert_modes (mode, VOIDmode, val, false);
7302 return targetm.speculation_safe_value (mode, target, val, failsafe);
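/* Editor's note: a hedged, illustrative use (not part of this file) of the
   builtin expanded above.  __builtin_speculation_safe_value returns its
   first argument on the architecturally correct path and the optional
   failsafe value (default 0) under misspeculation, so it is typically
   placed after a bounds check:

     int
     load_element (const int *array, unsigned int len, unsigned int idx)
     {
       if (idx < len)
         return array[__builtin_speculation_safe_value (idx, 0u)];
       return 0;
     }
*/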
7305 /* Expand an expression EXP that calls a built-in function,
7306 with result going to TARGET if that's convenient
7307 (and in mode MODE if that's convenient).
7308 SUBTARGET may be used as the target for computing one of EXP's operands.
7309 IGNORE is nonzero if the value is to be ignored. */
7312 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7313 int ignore)
7315 tree fndecl = get_callee_fndecl (exp);
7316 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7317 int flags;
7319 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7320 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7322 /* When ASan is enabled, we don't want to expand some memory/string
7323 builtins and instead rely on libsanitizer's hooks. This allows us to avoid
7324 redundant checks and be sure that a possible overflow will be detected
7325 by ASan. */
7327 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7328 if (param_asan_kernel_mem_intrinsic_prefix
7329 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7330 | SANITIZE_KERNEL_HWADDRESS))
7331 switch (fcode)
7333 rtx save_decl_rtl, ret;
7334 case BUILT_IN_MEMCPY:
7335 case BUILT_IN_MEMMOVE:
7336 case BUILT_IN_MEMSET:
7337 save_decl_rtl = DECL_RTL (fndecl);
7338 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7339 ret = expand_call (exp, target, ignore);
7340 DECL_RTL (fndecl) = save_decl_rtl;
7341 return ret;
7342 default:
7343 break;
7345 if (sanitize_flags_p (SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7346 return expand_call (exp, target, ignore);
7348 /* When not optimizing, generate calls to library functions for a certain
7349 set of builtins. */
7350 if (!optimize
7351 && !called_as_built_in (fndecl)
7352 && fcode != BUILT_IN_FORK
7353 && fcode != BUILT_IN_EXECL
7354 && fcode != BUILT_IN_EXECV
7355 && fcode != BUILT_IN_EXECLP
7356 && fcode != BUILT_IN_EXECLE
7357 && fcode != BUILT_IN_EXECVP
7358 && fcode != BUILT_IN_EXECVE
7359 && fcode != BUILT_IN_CLEAR_CACHE
7360 && !ALLOCA_FUNCTION_CODE_P (fcode)
7361 && fcode != BUILT_IN_FREE)
7362 return expand_call (exp, target, ignore);
7364 /* The built-in function expanders test for target == const0_rtx
7365 to determine whether the function's result will be ignored. */
7366 if (ignore)
7367 target = const0_rtx;
7369 /* If the result of a pure or const built-in function is ignored, and
7370 none of its arguments are volatile, we can avoid expanding the
7371 built-in call and just evaluate the arguments for side-effects. */
7372 if (target == const0_rtx
7373 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7374 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7376 bool volatilep = false;
7377 tree arg;
7378 call_expr_arg_iterator iter;
7380 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7381 if (TREE_THIS_VOLATILE (arg))
7383 volatilep = true;
7384 break;
7387 if (! volatilep)
7389 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7390 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7391 return const0_rtx;
7395 switch (fcode)
7397 CASE_FLT_FN (BUILT_IN_FABS):
7398 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7399 case BUILT_IN_FABSD32:
7400 case BUILT_IN_FABSD64:
7401 case BUILT_IN_FABSD128:
7402 target = expand_builtin_fabs (exp, target, subtarget);
7403 if (target)
7404 return target;
7405 break;
7407 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7408 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7409 target = expand_builtin_copysign (exp, target, subtarget);
7410 if (target)
7411 return target;
7412 break;
7414 /* Just do a normal library call if we were unable to fold
7415 the values. */
7416 CASE_FLT_FN (BUILT_IN_CABS):
7417 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7418 break;
7420 CASE_FLT_FN (BUILT_IN_FMA):
7421 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7422 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7423 if (target)
7424 return target;
7425 break;
7427 CASE_FLT_FN (BUILT_IN_ILOGB):
7428 if (! flag_unsafe_math_optimizations)
7429 break;
7430 gcc_fallthrough ();
7431 CASE_FLT_FN (BUILT_IN_ISINF):
7432 CASE_FLT_FN (BUILT_IN_FINITE):
7433 case BUILT_IN_ISFINITE:
7434 case BUILT_IN_ISNORMAL:
7435 target = expand_builtin_interclass_mathfn (exp, target);
7436 if (target)
7437 return target;
7438 break;
7440 case BUILT_IN_ISSIGNALING:
7441 target = expand_builtin_issignaling (exp, target);
7442 if (target)
7443 return target;
7444 break;
7446 CASE_FLT_FN (BUILT_IN_ICEIL):
7447 CASE_FLT_FN (BUILT_IN_LCEIL):
7448 CASE_FLT_FN (BUILT_IN_LLCEIL):
7449 CASE_FLT_FN (BUILT_IN_LFLOOR):
7450 CASE_FLT_FN (BUILT_IN_IFLOOR):
7451 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7452 target = expand_builtin_int_roundingfn (exp, target);
7453 if (target)
7454 return target;
7455 break;
7457 CASE_FLT_FN (BUILT_IN_IRINT):
7458 CASE_FLT_FN (BUILT_IN_LRINT):
7459 CASE_FLT_FN (BUILT_IN_LLRINT):
7460 CASE_FLT_FN (BUILT_IN_IROUND):
7461 CASE_FLT_FN (BUILT_IN_LROUND):
7462 CASE_FLT_FN (BUILT_IN_LLROUND):
7463 target = expand_builtin_int_roundingfn_2 (exp, target);
7464 if (target)
7465 return target;
7466 break;
7468 CASE_FLT_FN (BUILT_IN_POWI):
7469 target = expand_builtin_powi (exp, target);
7470 if (target)
7471 return target;
7472 break;
7474 CASE_FLT_FN (BUILT_IN_CEXPI):
7475 target = expand_builtin_cexpi (exp, target);
7476 gcc_assert (target);
7477 return target;
7479 CASE_FLT_FN (BUILT_IN_SIN):
7480 CASE_FLT_FN (BUILT_IN_COS):
7481 if (! flag_unsafe_math_optimizations)
7482 break;
7483 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7484 if (target)
7485 return target;
7486 break;
7488 CASE_FLT_FN (BUILT_IN_SINCOS):
7489 if (! flag_unsafe_math_optimizations)
7490 break;
7491 target = expand_builtin_sincos (exp);
7492 if (target)
7493 return target;
7494 break;
7496 case BUILT_IN_FEGETROUND:
7497 target = expand_builtin_fegetround (exp, target, target_mode);
7498 if (target)
7499 return target;
7500 break;
7502 case BUILT_IN_FECLEAREXCEPT:
7503 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7504 feclearexcept_optab);
7505 if (target)
7506 return target;
7507 break;
7509 case BUILT_IN_FERAISEEXCEPT:
7510 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7511 feraiseexcept_optab);
7512 if (target)
7513 return target;
7514 break;
7516 case BUILT_IN_APPLY_ARGS:
7517 return expand_builtin_apply_args ();
7519 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7520 FUNCTION with a copy of the parameters described by
7521 ARGUMENTS and ARGSIZE. It returns a block of memory
7522 allocated on the stack into which are stored all the registers
7523 that might possibly be used for returning the result of a
7524 function. ARGUMENTS is the value returned by
7525 __builtin_apply_args. ARGSIZE is the number of bytes of
7526 arguments that must be copied. ??? How should this value be
7527 computed? We'll also need a safe worst case value for varargs
7528 functions. */
7529 case BUILT_IN_APPLY:
7530 if (!validate_arglist (exp, POINTER_TYPE,
7531 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7532 && !validate_arglist (exp, REFERENCE_TYPE,
7533 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7534 return const0_rtx;
7535 else
7537 rtx ops[3];
7539 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7540 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7541 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7543 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7546 /* __builtin_return (RESULT) causes the function to return the
7547 value described by RESULT. RESULT is the address of the block of
7548 memory returned by __builtin_apply. */
7549 case BUILT_IN_RETURN:
7550 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7551 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7552 return const0_rtx;
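/* Editor's note: an illustrative forwarding wrapper (not part of this file)
   combining the three builtins handled above; the 64-byte argument-size
   guess is an assumption, since, as noted earlier, there is no safe general
   answer for varargs functions.

     double target_fn (double);

     double
     forward_to_target (double x)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }
*/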
7554 case BUILT_IN_SAVEREGS:
7555 return expand_builtin_saveregs ();
7557 case BUILT_IN_VA_ARG_PACK:
7558 /* All valid uses of __builtin_va_arg_pack () are removed during
7559 inlining. */
7560 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7561 return const0_rtx;
7563 case BUILT_IN_VA_ARG_PACK_LEN:
7564 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7565 inlining. */
7566 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7567 return const0_rtx;
7569 /* Return the address of the first anonymous stack arg. */
7570 case BUILT_IN_NEXT_ARG:
7571 if (fold_builtin_next_arg (exp, false))
7572 return const0_rtx;
7573 return expand_builtin_next_arg ();
7575 case BUILT_IN_CLEAR_CACHE:
7576 expand_builtin___clear_cache (exp);
7577 return const0_rtx;
7579 case BUILT_IN_CLASSIFY_TYPE:
7580 return expand_builtin_classify_type (exp);
7582 case BUILT_IN_CONSTANT_P:
7583 return const0_rtx;
7585 case BUILT_IN_FRAME_ADDRESS:
7586 case BUILT_IN_RETURN_ADDRESS:
7587 return expand_builtin_frame_address (fndecl, exp);
7589 /* Returns the address of the area where the structure is returned.
7590 0 otherwise. */
7591 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7592 if (call_expr_nargs (exp) != 0
7593 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7594 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7595 return const0_rtx;
7596 else
7597 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7599 CASE_BUILT_IN_ALLOCA:
7600 target = expand_builtin_alloca (exp);
7601 if (target)
7602 return target;
7603 break;
7605 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7606 return expand_asan_emit_allocas_unpoison (exp);
7608 case BUILT_IN_STACK_SAVE:
7609 return expand_stack_save ();
7611 case BUILT_IN_STACK_RESTORE:
7612 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7613 return const0_rtx;
7615 case BUILT_IN_BSWAP16:
7616 case BUILT_IN_BSWAP32:
7617 case BUILT_IN_BSWAP64:
7618 case BUILT_IN_BSWAP128:
7619 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7620 if (target)
7621 return target;
7622 break;
7624 CASE_INT_FN (BUILT_IN_FFS):
7625 target = expand_builtin_unop (target_mode, exp, target,
7626 subtarget, ffs_optab);
7627 if (target)
7628 return target;
7629 break;
7631 CASE_INT_FN (BUILT_IN_CLZ):
7632 target = expand_builtin_unop (target_mode, exp, target,
7633 subtarget, clz_optab);
7634 if (target)
7635 return target;
7636 break;
7638 CASE_INT_FN (BUILT_IN_CTZ):
7639 target = expand_builtin_unop (target_mode, exp, target,
7640 subtarget, ctz_optab);
7641 if (target)
7642 return target;
7643 break;
7645 CASE_INT_FN (BUILT_IN_CLRSB):
7646 target = expand_builtin_unop (target_mode, exp, target,
7647 subtarget, clrsb_optab);
7648 if (target)
7649 return target;
7650 break;
7652 CASE_INT_FN (BUILT_IN_POPCOUNT):
7653 target = expand_builtin_unop (target_mode, exp, target,
7654 subtarget, popcount_optab);
7655 if (target)
7656 return target;
7657 break;
7659 CASE_INT_FN (BUILT_IN_PARITY):
7660 target = expand_builtin_unop (target_mode, exp, target,
7661 subtarget, parity_optab);
7662 if (target)
7663 return target;
7664 break;
7666 case BUILT_IN_STRLEN:
7667 target = expand_builtin_strlen (exp, target, target_mode);
7668 if (target)
7669 return target;
7670 break;
7672 case BUILT_IN_STRNLEN:
7673 target = expand_builtin_strnlen (exp, target, target_mode);
7674 if (target)
7675 return target;
7676 break;
7678 case BUILT_IN_STRCPY:
7679 target = expand_builtin_strcpy (exp, target);
7680 if (target)
7681 return target;
7682 break;
7684 case BUILT_IN_STRNCPY:
7685 target = expand_builtin_strncpy (exp, target);
7686 if (target)
7687 return target;
7688 break;
7690 case BUILT_IN_STPCPY:
7691 target = expand_builtin_stpcpy (exp, target, mode);
7692 if (target)
7693 return target;
7694 break;
7696 case BUILT_IN_MEMCPY:
7697 target = expand_builtin_memcpy (exp, target);
7698 if (target)
7699 return target;
7700 break;
7702 case BUILT_IN_MEMMOVE:
7703 target = expand_builtin_memmove (exp, target);
7704 if (target)
7705 return target;
7706 break;
7708 case BUILT_IN_MEMPCPY:
7709 target = expand_builtin_mempcpy (exp, target);
7710 if (target)
7711 return target;
7712 break;
7714 case BUILT_IN_MEMSET:
7715 target = expand_builtin_memset (exp, target, mode);
7716 if (target)
7717 return target;
7718 break;
7720 case BUILT_IN_BZERO:
7721 target = expand_builtin_bzero (exp);
7722 if (target)
7723 return target;
7724 break;
7726 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7727 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7728 when changing it to a strcmp call. */
7729 case BUILT_IN_STRCMP_EQ:
7730 target = expand_builtin_memcmp (exp, target, true);
7731 if (target)
7732 return target;
7734 /* Change this call back to a BUILT_IN_STRCMP. */
7735 TREE_OPERAND (exp, 1)
7736 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7738 /* Delete the last parameter. */
7739 unsigned int i;
7740 vec<tree, va_gc> *arg_vec;
7741 vec_alloc (arg_vec, 2);
7742 for (i = 0; i < 2; i++)
7743 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7744 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7745 /* FALLTHROUGH */
7747 case BUILT_IN_STRCMP:
7748 target = expand_builtin_strcmp (exp, target);
7749 if (target)
7750 return target;
7751 break;
7753 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7754 back to a BUILT_IN_STRNCMP. */
7755 case BUILT_IN_STRNCMP_EQ:
7756 target = expand_builtin_memcmp (exp, target, true);
7757 if (target)
7758 return target;
7760 /* Change it back to a BUILT_IN_STRNCMP. */
7761 TREE_OPERAND (exp, 1)
7762 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7763 /* FALLTHROUGH */
7765 case BUILT_IN_STRNCMP:
7766 target = expand_builtin_strncmp (exp, target, mode);
7767 if (target)
7768 return target;
7769 break;
7771 case BUILT_IN_BCMP:
7772 case BUILT_IN_MEMCMP:
7773 case BUILT_IN_MEMCMP_EQ:
7774 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7775 if (target)
7776 return target;
7777 if (fcode == BUILT_IN_MEMCMP_EQ)
7779 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7780 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7782 break;
7784 case BUILT_IN_SETJMP:
7785 /* This should have been lowered to the builtins below. */
7786 gcc_unreachable ();
7788 case BUILT_IN_SETJMP_SETUP:
7789 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7790 and the receiver label. */
7791 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7793 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7794 VOIDmode, EXPAND_NORMAL);
7795 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7796 rtx_insn *label_r = label_rtx (label);
7798 expand_builtin_setjmp_setup (buf_addr, label_r);
7799 return const0_rtx;
7801 break;
7803 case BUILT_IN_SETJMP_RECEIVER:
7804 /* __builtin_setjmp_receiver is passed the receiver label. */
7805 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7807 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7808 rtx_insn *label_r = label_rtx (label);
7810 expand_builtin_setjmp_receiver (label_r);
7811 nonlocal_goto_handler_labels
7812 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7813 nonlocal_goto_handler_labels);
7814 /* ??? Do not let expand_label treat us as such since we would
7815 not want to be both on the list of non-local labels and on
7816 the list of forced labels. */
7817 FORCED_LABEL (label) = 0;
7818 return const0_rtx;
7820 break;
7822 /* __builtin_longjmp is passed a pointer to an array of five words.
7823 It's similar to the C library longjmp function but works with
7824 __builtin_setjmp above. */
7825 case BUILT_IN_LONGJMP:
7826 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7828 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7829 VOIDmode, EXPAND_NORMAL);
7830 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7832 if (value != const1_rtx)
7834 error ("%<__builtin_longjmp%> second argument must be 1");
7835 return const0_rtx;
7838 expand_builtin_longjmp (buf_addr, value);
7839 return const0_rtx;
7841 break;
7843 case BUILT_IN_NONLOCAL_GOTO:
7844 target = expand_builtin_nonlocal_goto (exp);
7845 if (target)
7846 return target;
7847 break;
7849 /* This updates the setjmp buffer that is its argument with the value
7850 of the current stack pointer. */
7851 case BUILT_IN_UPDATE_SETJMP_BUF:
7852 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7854 rtx buf_addr
7855 = expand_normal (CALL_EXPR_ARG (exp, 0));
7857 expand_builtin_update_setjmp_buf (buf_addr);
7858 return const0_rtx;
7860 break;
7862 case BUILT_IN_TRAP:
7863 case BUILT_IN_UNREACHABLE_TRAP:
7864 expand_builtin_trap ();
7865 return const0_rtx;
7867 case BUILT_IN_UNREACHABLE:
7868 expand_builtin_unreachable ();
7869 return const0_rtx;
7871 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7872 case BUILT_IN_SIGNBITD32:
7873 case BUILT_IN_SIGNBITD64:
7874 case BUILT_IN_SIGNBITD128:
7875 target = expand_builtin_signbit (exp, target);
7876 if (target)
7877 return target;
7878 break;
7880 /* Various hooks for the DWARF 2 __throw routine. */
7881 case BUILT_IN_UNWIND_INIT:
7882 expand_builtin_unwind_init ();
7883 return const0_rtx;
7884 case BUILT_IN_DWARF_CFA:
7885 return virtual_cfa_rtx;
7886 #ifdef DWARF2_UNWIND_INFO
7887 case BUILT_IN_DWARF_SP_COLUMN:
7888 return expand_builtin_dwarf_sp_column ();
7889 case BUILT_IN_INIT_DWARF_REG_SIZES:
7890 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7891 return const0_rtx;
7892 #endif
7893 case BUILT_IN_FROB_RETURN_ADDR:
7894 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7895 case BUILT_IN_EXTRACT_RETURN_ADDR:
7896 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7897 case BUILT_IN_EH_RETURN:
7898 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7899 CALL_EXPR_ARG (exp, 1));
7900 return const0_rtx;
7901 case BUILT_IN_EH_RETURN_DATA_REGNO:
7902 return expand_builtin_eh_return_data_regno (exp);
7903 case BUILT_IN_EXTEND_POINTER:
7904 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7905 case BUILT_IN_EH_POINTER:
7906 return expand_builtin_eh_pointer (exp);
7907 case BUILT_IN_EH_FILTER:
7908 return expand_builtin_eh_filter (exp);
7909 case BUILT_IN_EH_COPY_VALUES:
7910 return expand_builtin_eh_copy_values (exp);
7912 case BUILT_IN_VA_START:
7913 return expand_builtin_va_start (exp);
7914 case BUILT_IN_VA_END:
7915 return expand_builtin_va_end (exp);
7916 case BUILT_IN_VA_COPY:
7917 return expand_builtin_va_copy (exp);
7918 case BUILT_IN_EXPECT:
7919 return expand_builtin_expect (exp, target);
7920 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7921 return expand_builtin_expect_with_probability (exp, target);
7922 case BUILT_IN_ASSUME_ALIGNED:
7923 return expand_builtin_assume_aligned (exp, target);
7924 case BUILT_IN_PREFETCH:
7925 expand_builtin_prefetch (exp);
7926 return const0_rtx;
7928 case BUILT_IN_INIT_TRAMPOLINE:
7929 return expand_builtin_init_trampoline (exp, true);
7930 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7931 return expand_builtin_init_trampoline (exp, false);
7932 case BUILT_IN_ADJUST_TRAMPOLINE:
7933 return expand_builtin_adjust_trampoline (exp);
7935 case BUILT_IN_INIT_DESCRIPTOR:
7936 return expand_builtin_init_descriptor (exp);
7937 case BUILT_IN_ADJUST_DESCRIPTOR:
7938 return expand_builtin_adjust_descriptor (exp);
7940 case BUILT_IN_FORK:
7941 case BUILT_IN_EXECL:
7942 case BUILT_IN_EXECV:
7943 case BUILT_IN_EXECLP:
7944 case BUILT_IN_EXECLE:
7945 case BUILT_IN_EXECVP:
7946 case BUILT_IN_EXECVE:
7947 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7948 if (target)
7949 return target;
7950 break;
7952 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7953 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7954 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7955 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7956 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7957 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7958 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7959 if (target)
7960 return target;
7961 break;
7963 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7964 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7965 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7966 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7967 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7968 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7969 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7970 if (target)
7971 return target;
7972 break;
7974 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7975 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7976 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7977 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7978 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7979 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7980 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7981 if (target)
7982 return target;
7983 break;
7985 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7986 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7987 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7988 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7989 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7990 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7991 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7992 if (target)
7993 return target;
7994 break;
7996 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7997 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7998 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7999 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8000 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8001 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8002 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8003 if (target)
8004 return target;
8005 break;
8007 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8008 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8009 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8010 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8011 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8012 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8013 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8014 if (target)
8015 return target;
8016 break;
8018 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8019 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8020 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8021 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8022 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8023 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8024 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8025 if (target)
8026 return target;
8027 break;
8029 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8030 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8031 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8032 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8033 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8034 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8035 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8036 if (target)
8037 return target;
8038 break;
8040 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8041 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8042 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8043 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8044 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8045 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8046 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8047 if (target)
8048 return target;
8049 break;
8051 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8052 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8053 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8054 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8055 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8056 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8057 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8058 if (target)
8059 return target;
8060 break;
8062 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8063 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8064 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8065 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8066 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8067 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8068 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8069 if (target)
8070 return target;
8071 break;
8073 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8074 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8075 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8076 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8077 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8078 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8079 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8080 if (target)
8081 return target;
8082 break;
8084 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8085 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8086 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8087 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8088 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8089 if (mode == VOIDmode)
8090 mode = TYPE_MODE (boolean_type_node);
8091 if (!target || !register_operand (target, mode))
8092 target = gen_reg_rtx (mode);
8094 mode = get_builtin_sync_mode
8095 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8096 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8097 if (target)
8098 return target;
8099 break;
8101 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8102 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8103 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8104 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8105 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8106 mode = get_builtin_sync_mode
8107 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8108 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8109 if (target)
8110 return target;
8111 break;
8113 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8114 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8115 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8116 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8117 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8118 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8119 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8120 if (target)
8121 return target;
8122 break;
8124 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8125 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8126 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8127 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8128 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8129 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8130 expand_builtin_sync_lock_release (mode, exp);
8131 return const0_rtx;
8133 case BUILT_IN_SYNC_SYNCHRONIZE:
8134 expand_builtin_sync_synchronize ();
8135 return const0_rtx;
8137 case BUILT_IN_ATOMIC_EXCHANGE_1:
8138 case BUILT_IN_ATOMIC_EXCHANGE_2:
8139 case BUILT_IN_ATOMIC_EXCHANGE_4:
8140 case BUILT_IN_ATOMIC_EXCHANGE_8:
8141 case BUILT_IN_ATOMIC_EXCHANGE_16:
8142 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8143 target = expand_builtin_atomic_exchange (mode, exp, target);
8144 if (target)
8145 return target;
8146 break;
8148 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8149 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8150 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8151 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8152 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8154 unsigned int nargs, z;
8155 vec<tree, va_gc> *vec;
8157 mode =
8158 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8159 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8160 if (target)
8161 return target;
8163 /* If this is turned into an external library call, the weak parameter
8164 must be dropped to match the expected parameter list. */
8165 nargs = call_expr_nargs (exp);
8166 vec_alloc (vec, nargs - 1);
8167 for (z = 0; z < 3; z++)
8168 vec->quick_push (CALL_EXPR_ARG (exp, z));
8169 /* Skip the boolean weak parameter. */
8170 for (z = 4; z < 6; z++)
8171 vec->quick_push (CALL_EXPR_ARG (exp, z));
8172 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8173 break;
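/* Editor's note: a sketch (not part of this file) of the argument shuffle
   above.  When no inline expansion is possible, the six-argument builtin

     __atomic_compare_exchange_n (ptr, &expected, desired, weak,
                                  success_order, failure_order)

   is rewritten to call the sized library routine with the boolean WEAK
   argument (index 3) dropped, roughly

     __atomic_compare_exchange_4 (ptr, &expected, desired,
                                  success_order, failure_order)

   assuming a 4-byte operand; other sizes use the matching _1/_2/_8/_16
   entry points.  */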
8176 case BUILT_IN_ATOMIC_LOAD_1:
8177 case BUILT_IN_ATOMIC_LOAD_2:
8178 case BUILT_IN_ATOMIC_LOAD_4:
8179 case BUILT_IN_ATOMIC_LOAD_8:
8180 case BUILT_IN_ATOMIC_LOAD_16:
8181 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8182 target = expand_builtin_atomic_load (mode, exp, target);
8183 if (target)
8184 return target;
8185 break;
8187 case BUILT_IN_ATOMIC_STORE_1:
8188 case BUILT_IN_ATOMIC_STORE_2:
8189 case BUILT_IN_ATOMIC_STORE_4:
8190 case BUILT_IN_ATOMIC_STORE_8:
8191 case BUILT_IN_ATOMIC_STORE_16:
8192 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8193 target = expand_builtin_atomic_store (mode, exp);
8194 if (target)
8195 return const0_rtx;
8196 break;
8198 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8199 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8200 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8201 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8202 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8204 enum built_in_function lib;
8205 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8206 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8207 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8208 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8209 ignore, lib);
8210 if (target)
8211 return target;
8212 break;
8214 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8215 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8216 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8217 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8218 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8220 enum built_in_function lib;
8221 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8222 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8223 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8224 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8225 ignore, lib);
8226 if (target)
8227 return target;
8228 break;
8230 case BUILT_IN_ATOMIC_AND_FETCH_1:
8231 case BUILT_IN_ATOMIC_AND_FETCH_2:
8232 case BUILT_IN_ATOMIC_AND_FETCH_4:
8233 case BUILT_IN_ATOMIC_AND_FETCH_8:
8234 case BUILT_IN_ATOMIC_AND_FETCH_16:
8236 enum built_in_function lib;
8237 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8238 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8239 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8240 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8241 ignore, lib);
8242 if (target)
8243 return target;
8244 break;
8246 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8247 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8248 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8249 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8250 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8252 enum built_in_function lib;
8253 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8254 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8255 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8256 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8257 ignore, lib);
8258 if (target)
8259 return target;
8260 break;
8262 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8263 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8264 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8265 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8266 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8268 enum built_in_function lib;
8269 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8270 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8271 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8272 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8273 ignore, lib);
8274 if (target)
8275 return target;
8276 break;
8278 case BUILT_IN_ATOMIC_OR_FETCH_1:
8279 case BUILT_IN_ATOMIC_OR_FETCH_2:
8280 case BUILT_IN_ATOMIC_OR_FETCH_4:
8281 case BUILT_IN_ATOMIC_OR_FETCH_8:
8282 case BUILT_IN_ATOMIC_OR_FETCH_16:
8284 enum built_in_function lib;
8285 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8286 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8287 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8288 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8289 ignore, lib);
8290 if (target)
8291 return target;
8292 break;
8294 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8295 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8296 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8297 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8298 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8299 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8300 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8301 ignore, BUILT_IN_NONE);
8302 if (target)
8303 return target;
8304 break;
8306 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8307 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8308 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8309 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8310 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8311 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8312 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8313 ignore, BUILT_IN_NONE);
8314 if (target)
8315 return target;
8316 break;
8318 case BUILT_IN_ATOMIC_FETCH_AND_1:
8319 case BUILT_IN_ATOMIC_FETCH_AND_2:
8320 case BUILT_IN_ATOMIC_FETCH_AND_4:
8321 case BUILT_IN_ATOMIC_FETCH_AND_8:
8322 case BUILT_IN_ATOMIC_FETCH_AND_16:
8323 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8324 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8325 ignore, BUILT_IN_NONE);
8326 if (target)
8327 return target;
8328 break;
8330 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8331 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8332 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8333 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8334 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8335 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8336 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8337 ignore, BUILT_IN_NONE);
8338 if (target)
8339 return target;
8340 break;
8342 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8343 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8344 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8345 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8346 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8347 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8348 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8349 ignore, BUILT_IN_NONE);
8350 if (target)
8351 return target;
8352 break;
8354 case BUILT_IN_ATOMIC_FETCH_OR_1:
8355 case BUILT_IN_ATOMIC_FETCH_OR_2:
8356 case BUILT_IN_ATOMIC_FETCH_OR_4:
8357 case BUILT_IN_ATOMIC_FETCH_OR_8:
8358 case BUILT_IN_ATOMIC_FETCH_OR_16:
8359 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8360 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8361 ignore, BUILT_IN_NONE);
8362 if (target)
8363 return target;
8364 break;
8366 case BUILT_IN_ATOMIC_TEST_AND_SET:
8367 target = expand_builtin_atomic_test_and_set (exp, target);
8368 if (target)
8369 return target;
8370 break;
8372 case BUILT_IN_ATOMIC_CLEAR:
8373 return expand_builtin_atomic_clear (exp);
8375 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8376 return expand_builtin_atomic_always_lock_free (exp);
8378 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8379 target = expand_builtin_atomic_is_lock_free (exp);
8380 if (target)
8381 return target;
8382 break;
8384 case BUILT_IN_ATOMIC_THREAD_FENCE:
8385 expand_builtin_atomic_thread_fence (exp);
8386 return const0_rtx;
8388 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8389 expand_builtin_atomic_signal_fence (exp);
8390 return const0_rtx;
8392 case BUILT_IN_OBJECT_SIZE:
8393 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8394 return expand_builtin_object_size (exp);
8396 case BUILT_IN_MEMCPY_CHK:
8397 case BUILT_IN_MEMPCPY_CHK:
8398 case BUILT_IN_MEMMOVE_CHK:
8399 case BUILT_IN_MEMSET_CHK:
8400 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8401 if (target)
8402 return target;
8403 break;
8405 case BUILT_IN_STRCPY_CHK:
8406 case BUILT_IN_STPCPY_CHK:
8407 case BUILT_IN_STRNCPY_CHK:
8408 case BUILT_IN_STPNCPY_CHK:
8409 case BUILT_IN_STRCAT_CHK:
8410 case BUILT_IN_STRNCAT_CHK:
8411 case BUILT_IN_SNPRINTF_CHK:
8412 case BUILT_IN_VSNPRINTF_CHK:
8413 maybe_emit_chk_warning (exp, fcode);
8414 break;
8416 case BUILT_IN_SPRINTF_CHK:
8417 case BUILT_IN_VSPRINTF_CHK:
8418 maybe_emit_sprintf_chk_warning (exp, fcode);
8419 break;
8421 case BUILT_IN_THREAD_POINTER:
8422 return expand_builtin_thread_pointer (exp, target);
8424 case BUILT_IN_SET_THREAD_POINTER:
8425 expand_builtin_set_thread_pointer (exp);
8426 return const0_rtx;
8428 case BUILT_IN_ACC_ON_DEVICE:
8429 /* Do a library call if we failed to expand the builtin when
8430 folding. */
8431 break;
8433 case BUILT_IN_GOACC_PARLEVEL_ID:
8434 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8435 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8437 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8438 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8440 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8441 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8442 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8443 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8444 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8445 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8446 return expand_speculation_safe_value (mode, exp, target, ignore);
8448 default: /* Just do a library call if the builtin is unknown. */
8449 break;
8452 /* The switch statement above can drop through to cause the function
8453 to be called normally. */
8454 return expand_call (exp, target, ignore);
8457 /* Determine whether a tree node represents a call to a built-in
8458 function. If the tree T is a call to a built-in function with
8459 the right number of arguments of the appropriate types, return
8460 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8461 Otherwise the return value is END_BUILTINS. */
8463 enum built_in_function
8464 builtin_mathfn_code (const_tree t)
8466 const_tree fndecl, arg, parmlist;
8467 const_tree argtype, parmtype;
8468 const_call_expr_arg_iterator iter;
8470 if (TREE_CODE (t) != CALL_EXPR)
8471 return END_BUILTINS;
8473 fndecl = get_callee_fndecl (t);
8474 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8475 return END_BUILTINS;
8477 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8478 init_const_call_expr_arg_iterator (t, &iter);
8479 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8481 /* If a function doesn't take a variable number of arguments,
8482 the last element in the list will have type `void'. */
8483 parmtype = TREE_VALUE (parmlist);
8484 if (VOID_TYPE_P (parmtype))
8486 if (more_const_call_expr_args_p (&iter))
8487 return END_BUILTINS;
8488 return DECL_FUNCTION_CODE (fndecl);
8491 if (! more_const_call_expr_args_p (&iter))
8492 return END_BUILTINS;
8494 arg = next_const_call_expr_arg (&iter);
8495 argtype = TREE_TYPE (arg);
8497 if (SCALAR_FLOAT_TYPE_P (parmtype))
8499 if (! SCALAR_FLOAT_TYPE_P (argtype))
8500 return END_BUILTINS;
8502 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8504 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8505 return END_BUILTINS;
8507 else if (POINTER_TYPE_P (parmtype))
8509 if (! POINTER_TYPE_P (argtype))
8510 return END_BUILTINS;
8512 else if (INTEGRAL_TYPE_P (parmtype))
8514 if (! INTEGRAL_TYPE_P (argtype))
8515 return END_BUILTINS;
8517 else
8518 return END_BUILTINS;
8521 /* Variable-length argument list. */
8522 return DECL_FUNCTION_CODE (fndecl);
8525 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8526 evaluate to a constant. */
8528 static tree
8529 fold_builtin_constant_p (tree arg)
8531 /* We return 1 for a numeric type that's known to be a constant
8532 value at compile-time or for an aggregate type that's a
8533 literal constant. */
8534 STRIP_NOPS (arg);
8536 /* If we know this is a constant, return the constant one. */
8537 if (CONSTANT_CLASS_P (arg)
8538 || (TREE_CODE (arg) == CONSTRUCTOR
8539 && TREE_CONSTANT (arg)))
8540 return integer_one_node;
8541 if (TREE_CODE (arg) == ADDR_EXPR)
8543 tree op = TREE_OPERAND (arg, 0);
8544 if (TREE_CODE (op) == STRING_CST
8545 || (TREE_CODE (op) == ARRAY_REF
8546 && integer_zerop (TREE_OPERAND (op, 1))
8547 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8548 return integer_one_node;
8551 /* If this expression has side effects, show we don't know it to be a
8552 constant. Likewise if it's a pointer or aggregate type, since in
8553 those cases we only want literals, as those are only optimized
8554 when generating RTL, not later.
8555 And finally, if we are compiling an initializer, not code, we
8556 need to return a definite result now; there's not going to be any
8557 more optimization done. */
8558 if (TREE_SIDE_EFFECTS (arg)
8559 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8560 || POINTER_TYPE_P (TREE_TYPE (arg))
8561 || cfun == 0
8562 || folding_initializer
8563 || force_folding_builtin_constant_p)
8564 return integer_zero_node;
8566 return NULL_TREE;
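/* Editor's note: a few illustrative outcomes (not part of this file) of the
   folder above, assuming we are folding inside a function body and early
   folding is not being forced:

     __builtin_constant_p (3 * 7 + 1)            folds to 1 (constant node)
     __builtin_constant_p ("abc")                folds to 1 (address of a STRING_CST)
     __builtin_constant_p (call_with_side_effects ())
                                                 folds to 0
     __builtin_constant_p (local_int_variable)   is left alone (NULL_TREE) so a
                                                 later pass may still prove it
                                                 constant.  */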
8569 /* Create builtin_expect or builtin_expect_with_probability
8570 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8571 The Fortran FE can also produce builtin_expect with PREDICTOR as the third argument.
8572 builtin_expect_with_probability instead uses the third argument as the PROBABILITY
8573 value. */
8575 static tree
8576 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8577 tree predictor, tree probability)
8579 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8581 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8582 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8583 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8584 ret_type = TREE_TYPE (TREE_TYPE (fn));
8585 pred_type = TREE_VALUE (arg_types);
8586 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8588 pred = fold_convert_loc (loc, pred_type, pred);
8589 expected = fold_convert_loc (loc, expected_type, expected);
8591 if (probability)
8592 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8593 else
8594 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8595 predictor);
8597 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8598 build_int_cst (ret_type, 0));
8601 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8602 NULL_TREE if no simplification is possible. */
8604 tree
8605 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8606 tree arg3)
8608 tree inner, fndecl, inner_arg0;
8609 enum tree_code code;
8611 /* Distribute the expected value over short-circuiting operators.
8612 See through the cast from truthvalue_type_node to long. */
8613 inner_arg0 = arg0;
8614 while (CONVERT_EXPR_P (inner_arg0)
8615 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8616 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8617 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8619 /* If this is a builtin_expect within a builtin_expect, keep the
8620 inner one. See through a comparison against a constant. It
8621 might have been added to create a truthvalue. */
8622 inner = inner_arg0;
8624 if (COMPARISON_CLASS_P (inner)
8625 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8626 inner = TREE_OPERAND (inner, 0);
8628 if (TREE_CODE (inner) == CALL_EXPR
8629 && (fndecl = get_callee_fndecl (inner))
8630 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
8631 BUILT_IN_EXPECT_WITH_PROBABILITY))
8632 return arg0;
8634 inner = inner_arg0;
8635 code = TREE_CODE (inner);
8636 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8638 tree op0 = TREE_OPERAND (inner, 0);
8639 tree op1 = TREE_OPERAND (inner, 1);
8640 arg1 = save_expr (arg1);
8642 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8643 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8644 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8646 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8649 /* If the argument isn't invariant then there's nothing else we can do. */
8650 if (!TREE_CONSTANT (inner_arg0))
8651 return NULL_TREE;
8653 /* If we expect that a comparison against the argument will fold to
8654 a constant, return the constant. In practice, this means a true
8655 constant or the address of a non-weak symbol. */
8656 inner = inner_arg0;
8657 STRIP_NOPS (inner);
8658 if (TREE_CODE (inner) == ADDR_EXPR)
8662 inner = TREE_OPERAND (inner, 0);
8664 while (TREE_CODE (inner) == COMPONENT_REF
8665 || TREE_CODE (inner) == ARRAY_REF);
8666 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8667 return NULL_TREE;
8670 /* Otherwise, ARG0 already has the proper type for the return value. */
8671 return arg0;
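/* Editor's note: an illustrative folding (not part of this file) of the
   short-circuit distribution performed above.  A source expression such as

     if (__builtin_expect (a != 0 && b != 0, 1))
       do_work ();

   is rewritten roughly as

     if ((__builtin_expect ((long) (a != 0), 1) != 0)
         && (__builtin_expect ((long) (b != 0), 1) != 0))
       do_work ();

   so each arm of the TRUTH_ANDIF_EXPR carries the expected value.  */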
8674 /* Fold a call to __builtin_classify_type with argument ARG. */
8676 static tree
8677 fold_builtin_classify_type (tree arg)
8679 if (arg == 0)
8680 return build_int_cst (integer_type_node, no_type_class);
8682 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8685 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8686 ARG. */
8688 static tree
8689 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8691 if (!validate_arg (arg, POINTER_TYPE))
8692 return NULL_TREE;
8693 else
8695 c_strlen_data lendata = { };
8696 tree len = c_strlen (arg, 0, &lendata);
8698 if (len)
8699 return fold_convert_loc (loc, type, len);
8701 /* TODO: Move this to gimple-ssa-warn-access once the pass also runs
8702 early enough to detect invalid reads in multidimensional
8703 arrays and struct members. */
8704 if (!lendata.decl)
8705 c_strlen (arg, 1, &lendata);
8707 if (lendata.decl)
8709 if (EXPR_HAS_LOCATION (arg))
8710 loc = EXPR_LOCATION (arg);
8711 else if (loc == UNKNOWN_LOCATION)
8712 loc = input_location;
8713 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8716 return NULL_TREE;
8720 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8722 static tree
8723 fold_builtin_inf (location_t loc, tree type, int warn)
8725 /* __builtin_inff is intended to be usable to define INFINITY on all
8726 targets. If an infinity is not available, INFINITY expands "to a
8727 positive constant of type float that overflows at translation
8728 time", footnote "In this case, using INFINITY will violate the
8729 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8730 Thus we pedwarn to ensure this constraint violation is
8731 diagnosed. */
8732 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8733 pedwarn (loc, 0, "target format does not support infinity");
8735 return build_real (type, dconstinf);
8738 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8739 NULL_TREE if no simplification can be made. */
8741 static tree
8742 fold_builtin_sincos (location_t loc,
8743 tree arg0, tree arg1, tree arg2)
8745 tree type;
8746 tree fndecl, call = NULL_TREE;
8748 if (!validate_arg (arg0, REAL_TYPE)
8749 || !validate_arg (arg1, POINTER_TYPE)
8750 || !validate_arg (arg2, POINTER_TYPE))
8751 return NULL_TREE;
8753 type = TREE_TYPE (arg0);
8755 /* Canonicalize sincos to cexpi: find the cexpi builtin for this type. */
8756 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8757 if (fn == END_BUILTINS)
8758 return NULL_TREE;
8760 /* Calculate the result when the argument is a constant. */
8761 if (TREE_CODE (arg0) == REAL_CST)
8763 tree complex_type = build_complex_type (type);
8764 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8766 if (!call)
8768 if (!targetm.libc_has_function (function_c99_math_complex, type)
8769 || !builtin_decl_implicit_p (fn))
8770 return NULL_TREE;
8771 fndecl = builtin_decl_explicit (fn);
8772 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8773 call = builtin_save_expr (call);
8776 tree ptype = build_pointer_type (type);
8777 arg1 = fold_convert (ptype, arg1);
8778 arg2 = fold_convert (ptype, arg2);
8779 return build2 (COMPOUND_EXPR, void_type_node,
8780 build2 (MODIFY_EXPR, void_type_node,
8781 build_fold_indirect_ref_loc (loc, arg1),
8782 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8783 build2 (MODIFY_EXPR, void_type_node,
8784 build_fold_indirect_ref_loc (loc, arg2),
8785 fold_build1_loc (loc, REALPART_EXPR, type, call)));
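/* Editor's note: the canonicalization above written as an illustrative
   source-level equivalence (not part of this file).  A call

     sincos (x, &s, &c);

   is folded to the equivalent of

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   since cexpi (x) computes cos (x) + i * sin (x), so the imaginary part is
   the sine and the real part is the cosine.  */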
8788 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8789 Return NULL_TREE if no simplification can be made. */
8791 static tree
8792 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8794 if (!validate_arg (arg1, POINTER_TYPE)
8795 || !validate_arg (arg2, POINTER_TYPE)
8796 || !validate_arg (len, INTEGER_TYPE))
8797 return NULL_TREE;
8799 /* If the LEN parameter is zero, return zero. */
8800 if (integer_zerop (len))
8801 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8802 arg1, arg2);
8804 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8805 if (operand_equal_p (arg1, arg2, 0))
8806 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8808 /* If len parameter is one, return an expression corresponding to
8809 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8810 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8812 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8813 tree cst_uchar_ptr_node
8814 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8816 tree ind1
8817 = fold_convert_loc (loc, integer_type_node,
8818 build1 (INDIRECT_REF, cst_uchar_node,
8819 fold_convert_loc (loc,
8820 cst_uchar_ptr_node,
8821 arg1)));
8822 tree ind2
8823 = fold_convert_loc (loc, integer_type_node,
8824 build1 (INDIRECT_REF, cst_uchar_node,
8825 fold_convert_loc (loc,
8826 cst_uchar_ptr_node,
8827 arg2)));
8828 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8831 return NULL_TREE;
8834 /* Fold a call to builtin isascii with argument ARG. */
8836 static tree
8837 fold_builtin_isascii (location_t loc, tree arg)
8839 if (!validate_arg (arg, INTEGER_TYPE))
8840 return NULL_TREE;
8841 else
8843 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8844 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8845 build_int_cst (integer_type_node,
8846 ~ (unsigned HOST_WIDE_INT) 0x7f));
8847 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8848 arg, integer_zero_node);
8852 /* Fold a call to builtin toascii with argument ARG. */
8854 static tree
8855 fold_builtin_toascii (location_t loc, tree arg)
8857 if (!validate_arg (arg, INTEGER_TYPE))
8858 return NULL_TREE;
8860 /* Transform toascii(c) -> (c & 0x7f). */
8861 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8862 build_int_cst (integer_type_node, 0x7f));
8865 /* Fold a call to builtin isdigit with argument ARG. */
8867 static tree
8868 fold_builtin_isdigit (location_t loc, tree arg)
8870 if (!validate_arg (arg, INTEGER_TYPE))
8871 return NULL_TREE;
8872 else
8874 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8875 /* According to the C standard, isdigit is unaffected by locale.
8876 However, it definitely is affected by the target character set. */
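/* For example, on an ASCII target where '0' is 48, the folded form is
   (unsigned) c - 48 <= 9. */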
8877 unsigned HOST_WIDE_INT target_digit0
8878 = lang_hooks.to_target_charset ('0');
8880 if (target_digit0 == 0)
8881 return NULL_TREE;
8883 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8884 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8885 build_int_cst (unsigned_type_node, target_digit0));
8886 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8887 build_int_cst (unsigned_type_node, 9));
8891 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8893 static tree
8894 fold_builtin_fabs (location_t loc, tree arg, tree type)
8896 if (!validate_arg (arg, REAL_TYPE))
8897 return NULL_TREE;
8899 arg = fold_convert_loc (loc, type, arg);
8900 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8903 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8905 static tree
8906 fold_builtin_abs (location_t loc, tree arg, tree type)
8908 if (!validate_arg (arg, INTEGER_TYPE))
8909 return NULL_TREE;
8911 arg = fold_convert_loc (loc, type, arg);
8912 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8915 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8917 static tree
8918 fold_builtin_carg (location_t loc, tree arg, tree type)
8920 if (validate_arg (arg, COMPLEX_TYPE)
8921 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
8923 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8925 if (atan2_fn)
8927 tree new_arg = builtin_save_expr (arg);
8928 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8929 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8930 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8934 return NULL_TREE;
8937 /* Fold a call to builtin frexp, we can assume the base is 2. */
8939 static tree
8940 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8942 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8943 return NULL_TREE;
8945 STRIP_NOPS (arg0);
8947 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8948 return NULL_TREE;
8950 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8952 /* Proceed if a valid pointer type was passed in. */
8953 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8955 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8956 tree frac, exp, res;
8958 switch (value->cl)
8960 case rvc_zero:
8961 /* For +-0, return (*exp = 0, +-0). */
8962 exp = integer_zero_node;
8963 frac = arg0;
8964 break;
8965 case rvc_nan:
8966 case rvc_inf:
8967 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8968 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8969 case rvc_normal:
8971 /* Since the frexp function always expects base 2, and in
8972 GCC normalized significands are already in the range
8973 [0.5, 1.0), we have exactly what frexp wants. */
8974 REAL_VALUE_TYPE frac_rvt = *value;
8975 SET_REAL_EXP (&frac_rvt, 0);
8976 frac = build_real (rettype, frac_rvt);
8977 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8979 break;
8980 default:
8981 gcc_unreachable ();
8984 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8985 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8986 TREE_SIDE_EFFECTS (arg1) = 1;
8987 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8988 suppress_warning (res, OPT_Wunused_value);
8989 return res;
8992 return NULL_TREE;
8995 /* Fold a call to builtin modf. */
8997 static tree
8998 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9000 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9001 return NULL_TREE;
9003 STRIP_NOPS (arg0);
9005 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9006 return NULL_TREE;
9008 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9010 /* Proceed if a valid pointer type was passed in. */
9011 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9013 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9014 REAL_VALUE_TYPE trunc, frac;
9015 tree res;
9017 switch (value->cl)
9019 case rvc_nan:
9020 case rvc_zero:
9021 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9022 trunc = frac = *value;
9023 break;
9024 case rvc_inf:
9025 /* For +-Inf, return (*arg1 = arg0, +-0). */
9026 frac = dconst0;
9027 frac.sign = value->sign;
9028 trunc = *value;
9029 break;
9030 case rvc_normal:
9031 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9032 real_trunc (&trunc, VOIDmode, value);
9033 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9034 /* If the original number was negative and already
9035 integral, then the fractional part is -0.0. */
9036 if (value->sign && frac.cl == rvc_zero)
9037 frac.sign = value->sign;
9038 break;
9041 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9042 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9043 build_real (rettype, trunc));
9044 TREE_SIDE_EFFECTS (arg1) = 1;
9045 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9046 build_real (rettype, frac));
9047 suppress_warning (res, OPT_Wunused_value);
9048 return res;
9051 return NULL_TREE;
9054 /* Given a location LOC, an interclass builtin function decl FNDECL
9055 and its single argument ARG, return a folded expression computing
9056 the same, or NULL_TREE if we either couldn't or didn't want to fold
9057 (the latter happens if there's an RTL instruction available). */
9059 static tree
9060 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9062 machine_mode mode;
9064 if (!validate_arg (arg, REAL_TYPE))
9065 return NULL_TREE;
9067 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9068 return NULL_TREE;
9070 mode = TYPE_MODE (TREE_TYPE (arg));
9072 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9074 /* If there is no optab, try generic code. */
9075 switch (DECL_FUNCTION_CODE (fndecl))
9077 tree result;
9079 CASE_FLT_FN (BUILT_IN_ISINF):
9081 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9082 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9083 tree type = TREE_TYPE (arg);
9084 REAL_VALUE_TYPE r;
9085 char buf[128];
9087 if (is_ibm_extended)
9089 /* NaN and Inf are encoded in the high-order double value
9090 only. The low-order value is not significant. */
9091 type = double_type_node;
9092 mode = DFmode;
9093 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9095 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9096 real_from_string (&r, buf);
9097 result = build_call_expr (isgr_fn, 2,
9098 fold_build1_loc (loc, ABS_EXPR, type, arg),
9099 build_real (type, r));
9100 return result;
9102 CASE_FLT_FN (BUILT_IN_FINITE):
9103 case BUILT_IN_ISFINITE:
9105 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9106 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9107 tree type = TREE_TYPE (arg);
9108 REAL_VALUE_TYPE r;
9109 char buf[128];
9111 if (is_ibm_extended)
9113 /* NaN and Inf are encoded in the high-order double value
9114 only. The low-order value is not significant. */
9115 type = double_type_node;
9116 mode = DFmode;
9117 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9119 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9120 real_from_string (&r, buf);
9121 result = build_call_expr (isle_fn, 2,
9122 fold_build1_loc (loc, ABS_EXPR, type, arg),
9123 build_real (type, r));
9124 /*result = fold_build2_loc (loc, UNGT_EXPR,
9125 TREE_TYPE (TREE_TYPE (fndecl)),
9126 fold_build1_loc (loc, ABS_EXPR, type, arg),
9127 build_real (type, r));
9128 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9129 TREE_TYPE (TREE_TYPE (fndecl)),
9130 result);*/
9131 return result;
9133 case BUILT_IN_ISNORMAL:
9135 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9136 islessequal(fabs(x),DBL_MAX). */
9137 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9138 tree type = TREE_TYPE (arg);
9139 tree orig_arg, max_exp, min_exp;
9140 machine_mode orig_mode = mode;
9141 REAL_VALUE_TYPE rmax, rmin;
9142 char buf[128];
9144 orig_arg = arg = builtin_save_expr (arg);
9145 if (is_ibm_extended)
9147 /* Use double to test the normal range of IBM extended
9148 precision. Emin for IBM extended precision is
9149 different to emin for IEEE double, being 53 higher
9150 since the low double exponent is at least 53 lower
9151 than the high double exponent. */
9152 type = double_type_node;
9153 mode = DFmode;
9154 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9156 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9158 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9159 real_from_string (&rmax, buf);
9160 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9161 real_from_string (&rmin, buf);
9162 max_exp = build_real (type, rmax);
9163 min_exp = build_real (type, rmin);
9165 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9166 if (is_ibm_extended)
9168 /* Testing the high end of the range is done just using
9169 the high double, using the same test as isfinite().
9170 For the subnormal end of the range we first test the
9171 high double, then if its magnitude is equal to the
9172 limit of 0x1p-969, we test whether the low double is
9173 non-zero and opposite sign to the high double. */
9174 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9175 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9176 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9177 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9178 arg, min_exp);
9179 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9180 complex_double_type_node, orig_arg);
9181 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9182 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9183 tree zero = build_real (type, dconst0);
9184 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9185 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9186 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9187 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9188 fold_build3 (COND_EXPR,
9189 integer_type_node,
9190 hilt, logt, lolt));
9191 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9192 eq_min, ok_lo);
9193 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9194 gt_min, eq_min);
9196 else
9198 tree const isge_fn
9199 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9200 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9202 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9203 max_exp, min_exp);
9204 return result;
9206 default:
9207 break;
9210 return NULL_TREE;
9213 /* Fold a call to a classification built-in such as __builtin_isnan,
9214 __builtin_isinf or __builtin_isfinite, selected by BUILTIN_INDEX. ARG is the argument for the call. */
9216 static tree
9217 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9219 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9221 if (!validate_arg (arg, REAL_TYPE))
9222 return NULL_TREE;
9224 switch (builtin_index)
9226 case BUILT_IN_ISINF:
9227 if (tree_expr_infinite_p (arg))
9228 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9229 if (!tree_expr_maybe_infinite_p (arg))
9230 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9231 return NULL_TREE;
9233 case BUILT_IN_ISINF_SIGN:
9235 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9236 /* In a boolean context, GCC will fold the inner COND_EXPR to
9237 1. So e.g. "if (isinf_sign(x))" would be folded to just
9238 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9239 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9240 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9241 tree tmp = NULL_TREE;
9243 arg = builtin_save_expr (arg);
9245 if (signbit_fn && isinf_fn)
9247 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9248 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9250 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9251 signbit_call, integer_zero_node);
9252 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9253 isinf_call, integer_zero_node);
9255 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9256 integer_minus_one_node, integer_one_node);
9257 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9258 isinf_call, tmp,
9259 integer_zero_node);
9262 return tmp;
9265 case BUILT_IN_ISFINITE:
9266 if (tree_expr_finite_p (arg))
9267 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9268 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9269 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9270 return NULL_TREE;
9272 case BUILT_IN_ISNAN:
9273 if (tree_expr_nan_p (arg))
9274 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9275 if (!tree_expr_maybe_nan_p (arg))
9276 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9279 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9280 if (is_ibm_extended)
9282 /* NaN and Inf are encoded in the high-order double value
9283 only. The low-order value is not significant. */
9284 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9287 arg = builtin_save_expr (arg);
9288 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9290 case BUILT_IN_ISSIGNALING:
9291 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9292 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9293 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9294 here, so there is some possibility of __builtin_issignaling working
9295 without -fsignaling-nans, especially when -fno-signaling-nans is
9296 the default. */
9297 if (!tree_expr_maybe_nan_p (arg))
9298 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9299 return NULL_TREE;
9301 default:
9302 gcc_unreachable ();
9306 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9307 This builtin will generate code to return the appropriate floating
9308 point classification depending on the value of the floating point
9309 number passed in. The possible return values must be supplied as
9310 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9311 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9312 one floating point argument which is "type generic". */
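/* For illustration only: a C library might implement the type-generic
   macro along the lines of
   #define fpclassify(x) \
     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                           FP_SUBNORMAL, FP_ZERO, (x))
   though the exact definition is up to the library. */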
9314 static tree
9315 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9317 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9318 arg, type, res, tmp;
9319 machine_mode mode;
9320 REAL_VALUE_TYPE r;
9321 char buf[128];
9323 /* Verify the required arguments in the original call. */
9324 if (nargs != 6
9325 || !validate_arg (args[0], INTEGER_TYPE)
9326 || !validate_arg (args[1], INTEGER_TYPE)
9327 || !validate_arg (args[2], INTEGER_TYPE)
9328 || !validate_arg (args[3], INTEGER_TYPE)
9329 || !validate_arg (args[4], INTEGER_TYPE)
9330 || !validate_arg (args[5], REAL_TYPE))
9331 return NULL_TREE;
9333 fp_nan = args[0];
9334 fp_infinite = args[1];
9335 fp_normal = args[2];
9336 fp_subnormal = args[3];
9337 fp_zero = args[4];
9338 arg = args[5];
9339 type = TREE_TYPE (arg);
9340 mode = TYPE_MODE (type);
9341 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9343 /* fpclassify(x) ->
9344 isnan(x) ? FP_NAN :
9345 (fabs(x) == Inf ? FP_INFINITE :
9346 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9347 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9349 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9350 build_real (type, dconst0));
9351 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9352 tmp, fp_zero, fp_subnormal);
9354 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9355 real_from_string (&r, buf);
9356 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9357 arg, build_real (type, r));
9358 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9360 if (tree_expr_maybe_infinite_p (arg))
9362 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9363 build_real (type, dconstinf));
9364 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9365 fp_infinite, res);
9368 if (tree_expr_maybe_nan_p (arg))
9370 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9371 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9374 return res;
9377 /* Fold a call to an unordered comparison function such as
9378 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9379 being called and ARG0 and ARG1 are the arguments for the call.
9380 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9381 the opposite of the desired result. UNORDERED_CODE is used
9382 for modes that can hold NaNs and ORDERED_CODE is used for
9383 the rest. */
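/* For example, __builtin_isgreater (x, y) is folded to the negation of
   UNLE_EXPR (x, y) when either operand may be a NaN, so no exception is
   raised for unordered operands, and to the negation of LE_EXPR (x, y)
   otherwise. */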
9385 static tree
9386 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9387 enum tree_code unordered_code,
9388 enum tree_code ordered_code)
9390 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9391 enum tree_code code;
9392 tree type0, type1;
9393 enum tree_code code0, code1;
9394 tree cmp_type = NULL_TREE;
9396 type0 = TREE_TYPE (arg0);
9397 type1 = TREE_TYPE (arg1);
9399 code0 = TREE_CODE (type0);
9400 code1 = TREE_CODE (type1);
9402 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9403 /* Choose the wider of two real types. */
9404 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9405 ? type0 : type1;
9406 else if (code0 == REAL_TYPE
9407 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
9408 cmp_type = type0;
9409 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
9410 && code1 == REAL_TYPE)
9411 cmp_type = type1;
9413 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9414 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9416 if (unordered_code == UNORDERED_EXPR)
9418 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9419 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9420 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9421 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9422 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9425 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9426 ? unordered_code : ordered_code;
9427 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9428 fold_build2_loc (loc, code, type, arg0, arg1));
9431 /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
9432 After choosing the wider floating-point type for the comparison,
9433 the code is folded to:
9434 SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
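/* Both halves use the ordered GE_EXPR/LE_EXPR comparisons, so a NaN
   operand is expected to raise the invalid exception; that signaling
   behaviour is what distinguishes iseqsig from the quiet == operator. */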
9436 static tree
9437 fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
9439 tree type0, type1;
9440 enum tree_code code0, code1;
9441 tree cmp1, cmp2, cmp_type = NULL_TREE;
9443 type0 = TREE_TYPE (arg0);
9444 type1 = TREE_TYPE (arg1);
9446 code0 = TREE_CODE (type0);
9447 code1 = TREE_CODE (type1);
9449 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9450 /* Choose the wider of two real types. */
9451 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9452 ? type0 : type1;
9453 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9454 cmp_type = type0;
9455 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9456 cmp_type = type1;
9458 arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
9459 arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
9461 cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
9462 cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
9464 return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
9467 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9468 arithmetic if it can never overflow, or into internal functions that
9469 return both the result of the arithmetic and an overflowed boolean flag
9470 in a complex integer result, or some other check for overflow.
9471 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9472 checking part of that. */
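/* For the non-_p variants with non-constant operands the folded form is
   roughly
   t = .ADD_OVERFLOW (arg0, arg1); (or .SUB_/.MUL_OVERFLOW)
   *arg2 = REALPART_EXPR (t);
   return (bool) IMAGPART_EXPR (t);
   whereas constant operands are folded directly to the result and flag. */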
9474 static tree
9475 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9476 tree arg0, tree arg1, tree arg2)
9478 enum internal_fn ifn = IFN_LAST;
9479 /* The code of the expression corresponding to the built-in. */
9480 enum tree_code opcode = ERROR_MARK;
9481 bool ovf_only = false;
9483 switch (fcode)
9485 case BUILT_IN_ADD_OVERFLOW_P:
9486 ovf_only = true;
9487 /* FALLTHRU */
9488 case BUILT_IN_ADD_OVERFLOW:
9489 case BUILT_IN_SADD_OVERFLOW:
9490 case BUILT_IN_SADDL_OVERFLOW:
9491 case BUILT_IN_SADDLL_OVERFLOW:
9492 case BUILT_IN_UADD_OVERFLOW:
9493 case BUILT_IN_UADDL_OVERFLOW:
9494 case BUILT_IN_UADDLL_OVERFLOW:
9495 opcode = PLUS_EXPR;
9496 ifn = IFN_ADD_OVERFLOW;
9497 break;
9498 case BUILT_IN_SUB_OVERFLOW_P:
9499 ovf_only = true;
9500 /* FALLTHRU */
9501 case BUILT_IN_SUB_OVERFLOW:
9502 case BUILT_IN_SSUB_OVERFLOW:
9503 case BUILT_IN_SSUBL_OVERFLOW:
9504 case BUILT_IN_SSUBLL_OVERFLOW:
9505 case BUILT_IN_USUB_OVERFLOW:
9506 case BUILT_IN_USUBL_OVERFLOW:
9507 case BUILT_IN_USUBLL_OVERFLOW:
9508 opcode = MINUS_EXPR;
9509 ifn = IFN_SUB_OVERFLOW;
9510 break;
9511 case BUILT_IN_MUL_OVERFLOW_P:
9512 ovf_only = true;
9513 /* FALLTHRU */
9514 case BUILT_IN_MUL_OVERFLOW:
9515 case BUILT_IN_SMUL_OVERFLOW:
9516 case BUILT_IN_SMULL_OVERFLOW:
9517 case BUILT_IN_SMULLL_OVERFLOW:
9518 case BUILT_IN_UMUL_OVERFLOW:
9519 case BUILT_IN_UMULL_OVERFLOW:
9520 case BUILT_IN_UMULLL_OVERFLOW:
9521 opcode = MULT_EXPR;
9522 ifn = IFN_MUL_OVERFLOW;
9523 break;
9524 default:
9525 gcc_unreachable ();
9528 /* For the "generic" overloads, the first two arguments can have different
9529 types and the last argument determines the target type to use to check
9530 for overflow. The arguments of the other overloads all have the same
9531 type. */
9532 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9534 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9535 arguments are constant, attempt to fold the built-in call into a constant
9536 expression indicating whether or not it detected an overflow. */
9537 if (ovf_only
9538 && TREE_CODE (arg0) == INTEGER_CST
9539 && TREE_CODE (arg1) == INTEGER_CST)
9540 /* Perform the computation in the target type and check for overflow. */
9541 return omit_one_operand_loc (loc, boolean_type_node,
9542 arith_overflowed_p (opcode, type, arg0, arg1)
9543 ? boolean_true_node : boolean_false_node,
9544 arg2);
9546 tree intres, ovfres;
9547 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9549 intres = fold_binary_loc (loc, opcode, type,
9550 fold_convert_loc (loc, type, arg0),
9551 fold_convert_loc (loc, type, arg1));
9552 if (TREE_OVERFLOW (intres))
9553 intres = drop_tree_overflow (intres);
9554 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9555 ? boolean_true_node : boolean_false_node);
9557 else
9559 tree ctype = build_complex_type (type);
9560 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9561 arg0, arg1);
9562 tree tgt = save_expr (call);
9563 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9564 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9565 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9568 if (ovf_only)
9569 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9571 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9572 tree store
9573 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9574 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9577 /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
9578 internal function. */
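/* Narrow operands are first widened to a standard type; e.g. on a target
   with 32-bit int, __builtin_clzg on an unsigned char value X is computed
   roughly as __builtin_clz ((unsigned int) X) + (8 - 32), compensating for
   the 24 extra leading zero bits (a sketch of the common case only). */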
9580 static tree
9581 fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
9582 tree arg0, tree arg1)
9584 enum internal_fn ifn;
9585 enum built_in_function fcodei, fcodel, fcodell;
9586 tree arg0_type = TREE_TYPE (arg0);
9587 tree cast_type = NULL_TREE;
9588 int addend = 0;
9590 switch (fcode)
9592 case BUILT_IN_CLZG:
9593 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
9594 return NULL_TREE;
9595 ifn = IFN_CLZ;
9596 fcodei = BUILT_IN_CLZ;
9597 fcodel = BUILT_IN_CLZL;
9598 fcodell = BUILT_IN_CLZLL;
9599 break;
9600 case BUILT_IN_CTZG:
9601 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
9602 return NULL_TREE;
9603 ifn = IFN_CTZ;
9604 fcodei = BUILT_IN_CTZ;
9605 fcodel = BUILT_IN_CTZL;
9606 fcodell = BUILT_IN_CTZLL;
9607 break;
9608 case BUILT_IN_CLRSBG:
9609 ifn = IFN_CLRSB;
9610 fcodei = BUILT_IN_CLRSB;
9611 fcodel = BUILT_IN_CLRSBL;
9612 fcodell = BUILT_IN_CLRSBLL;
9613 break;
9614 case BUILT_IN_FFSG:
9615 ifn = IFN_FFS;
9616 fcodei = BUILT_IN_FFS;
9617 fcodel = BUILT_IN_FFSL;
9618 fcodell = BUILT_IN_FFSLL;
9619 break;
9620 case BUILT_IN_PARITYG:
9621 ifn = IFN_PARITY;
9622 fcodei = BUILT_IN_PARITY;
9623 fcodel = BUILT_IN_PARITYL;
9624 fcodell = BUILT_IN_PARITYLL;
9625 break;
9626 case BUILT_IN_POPCOUNTG:
9627 ifn = IFN_POPCOUNT;
9628 fcodei = BUILT_IN_POPCOUNT;
9629 fcodel = BUILT_IN_POPCOUNTL;
9630 fcodell = BUILT_IN_POPCOUNTLL;
9631 break;
9632 default:
9633 gcc_unreachable ();
9636 if (TYPE_PRECISION (arg0_type)
9637 <= TYPE_PRECISION (long_long_unsigned_type_node))
9639 if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
9641 cast_type = (TYPE_UNSIGNED (arg0_type)
9642 ? unsigned_type_node : integer_type_node);
9643 else if (TYPE_PRECISION (arg0_type)
9644 <= TYPE_PRECISION (long_unsigned_type_node))
9646 cast_type = (TYPE_UNSIGNED (arg0_type)
9647 ? long_unsigned_type_node : long_integer_type_node);
9648 fcodei = fcodel;
9650 else
9652 cast_type = (TYPE_UNSIGNED (arg0_type)
9653 ? long_long_unsigned_type_node
9654 : long_long_integer_type_node);
9655 fcodei = fcodell;
9658 else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
9660 cast_type
9661 = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
9662 TYPE_UNSIGNED (arg0_type));
9663 gcc_assert (TYPE_PRECISION (cast_type)
9664 == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
9665 fcodei = END_BUILTINS;
9667 else
9668 fcodei = END_BUILTINS;
9669 if (cast_type)
9671 switch (fcode)
9673 case BUILT_IN_CLZG:
9674 case BUILT_IN_CLRSBG:
9675 addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
9676 break;
9677 default:
9678 break;
9680 arg0 = fold_convert (cast_type, arg0);
9681 arg0_type = cast_type;
9684 if (arg1)
9685 arg1 = fold_convert (integer_type_node, arg1);
9687 tree arg2 = arg1;
9688 if (fcode == BUILT_IN_CLZG && addend)
9690 if (arg1)
9691 arg0 = save_expr (arg0);
9692 arg2 = NULL_TREE;
9694 tree call = NULL_TREE, tem;
9695 if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
9696 && (TYPE_PRECISION (arg0_type)
9697 == 2 * TYPE_PRECISION (long_long_unsigned_type_node)))
9699 /* __int128 expansions using up to 2 long long builtins. */
9700 arg0 = save_expr (arg0);
9701 tree type = (TYPE_UNSIGNED (arg0_type)
9702 ? long_long_unsigned_type_node
9703 : long_long_integer_type_node);
9704 tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
9705 build_int_cst (integer_type_node,
9706 MAX_FIXED_MODE_SIZE / 2));
9707 hi = fold_convert (type, hi);
9708 tree lo = fold_convert (type, arg0);
9709 switch (fcode)
9711 case BUILT_IN_CLZG:
9712 call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
9713 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9714 build_int_cst (integer_type_node,
9715 MAX_FIXED_MODE_SIZE / 2));
9716 if (arg2)
9717 call = fold_build3 (COND_EXPR, integer_type_node,
9718 fold_build2 (NE_EXPR, boolean_type_node,
9719 lo, build_zero_cst (type)),
9720 call, arg2);
9721 call = fold_build3 (COND_EXPR, integer_type_node,
9722 fold_build2 (NE_EXPR, boolean_type_node,
9723 hi, build_zero_cst (type)),
9724 fold_builtin_bit_query (loc, fcode, hi,
9725 NULL_TREE),
9726 call);
9727 break;
9728 case BUILT_IN_CTZG:
9729 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
9730 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9731 build_int_cst (integer_type_node,
9732 MAX_FIXED_MODE_SIZE / 2));
9733 if (arg2)
9734 call = fold_build3 (COND_EXPR, integer_type_node,
9735 fold_build2 (NE_EXPR, boolean_type_node,
9736 hi, build_zero_cst (type)),
9737 call, arg2);
9738 call = fold_build3 (COND_EXPR, integer_type_node,
9739 fold_build2 (NE_EXPR, boolean_type_node,
9740 lo, build_zero_cst (type)),
9741 fold_builtin_bit_query (loc, fcode, lo,
9742 NULL_TREE),
9743 call);
9744 break;
9745 case BUILT_IN_CLRSBG:
9746 tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
9747 tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
9748 build_int_cst (integer_type_node,
9749 MAX_FIXED_MODE_SIZE / 2));
9750 tem = fold_build3 (COND_EXPR, integer_type_node,
9751 fold_build2 (LT_EXPR, boolean_type_node,
9752 fold_build2 (BIT_XOR_EXPR, type,
9753 lo, hi),
9754 build_zero_cst (type)),
9755 build_int_cst (integer_type_node,
9756 MAX_FIXED_MODE_SIZE / 2 - 1),
9757 tem);
9758 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
9759 call = save_expr (call);
9760 call = fold_build3 (COND_EXPR, integer_type_node,
9761 fold_build2 (NE_EXPR, boolean_type_node,
9762 call,
9763 build_int_cst (integer_type_node,
9764 MAX_FIXED_MODE_SIZE
9765 / 2 - 1)),
9766 call, tem);
9767 break;
9768 case BUILT_IN_FFSG:
9769 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
9770 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9771 build_int_cst (integer_type_node,
9772 MAX_FIXED_MODE_SIZE / 2));
9773 call = fold_build3 (COND_EXPR, integer_type_node,
9774 fold_build2 (NE_EXPR, boolean_type_node,
9775 hi, build_zero_cst (type)),
9776 call, integer_zero_node);
9777 call = fold_build3 (COND_EXPR, integer_type_node,
9778 fold_build2 (NE_EXPR, boolean_type_node,
9779 lo, build_zero_cst (type)),
9780 fold_builtin_bit_query (loc, fcode, lo,
9781 NULL_TREE),
9782 call);
9783 break;
9784 case BUILT_IN_PARITYG:
9785 call = fold_builtin_bit_query (loc, fcode,
9786 fold_build2 (BIT_XOR_EXPR, type,
9787 lo, hi), NULL_TREE);
9788 break;
9789 case BUILT_IN_POPCOUNTG:
9790 call = fold_build2 (PLUS_EXPR, integer_type_node,
9791 fold_builtin_bit_query (loc, fcode, hi,
9792 NULL_TREE),
9793 fold_builtin_bit_query (loc, fcode, lo,
9794 NULL_TREE));
9795 break;
9796 default:
9797 gcc_unreachable ();
9800 else
9802 /* Only keep second argument to IFN_CLZ/IFN_CTZ if it is the
9803 value defined at zero during GIMPLE, or for large/huge _BitInt
9804 (which are then lowered during bitint lowering). */
9805 if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
9807 int val;
9808 if (fcode == BUILT_IN_CLZG)
9810 if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
9811 val) != 2
9812 || wi::to_widest (arg2) != val)
9813 arg2 = NULL_TREE;
9815 else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
9816 val) != 2
9817 || wi::to_widest (arg2) != val)
9818 arg2 = NULL_TREE;
9819 if (!direct_internal_fn_supported_p (ifn, arg0_type,
9820 OPTIMIZE_FOR_BOTH))
9821 arg2 = NULL_TREE;
9822 if (arg2 == NULL_TREE)
9823 arg0 = save_expr (arg0);
9825 if (fcodei == END_BUILTINS || arg2)
9826 call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
9827 arg2 ? 2 : 1, arg0, arg2);
9828 else
9829 call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
9830 arg0);
9832 if (addend)
9833 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
9834 build_int_cst (integer_type_node, addend));
9835 if (arg1 && arg2 == NULL_TREE)
9836 call = fold_build3 (COND_EXPR, integer_type_node,
9837 fold_build2 (NE_EXPR, boolean_type_node,
9838 arg0, build_zero_cst (arg0_type)),
9839 call, arg1);
9841 return call;
9844 /* Fold __builtin_{add,sub}c{,l,ll} into a pair of internal functions
9845 that return both the result of the arithmetic and an overflow
9846 flag in a complex integer result. */
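/* For example, __builtin_addc (a, b, carry_in, &carry_out) is folded to
   roughly
   t1 = .ADD_OVERFLOW (a, b);
   t2 = .ADD_OVERFLOW (REALPART_EXPR (t1), carry_in);
   *carry_out = IMAGPART_EXPR (t1) | IMAGPART_EXPR (t2);
   result = REALPART_EXPR (t2); */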
9848 static tree
9849 fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
9850 tree *args)
9852 enum internal_fn ifn;
9854 switch (fcode)
9856 case BUILT_IN_ADDC:
9857 case BUILT_IN_ADDCL:
9858 case BUILT_IN_ADDCLL:
9859 ifn = IFN_ADD_OVERFLOW;
9860 break;
9861 case BUILT_IN_SUBC:
9862 case BUILT_IN_SUBCL:
9863 case BUILT_IN_SUBCLL:
9864 ifn = IFN_SUB_OVERFLOW;
9865 break;
9866 default:
9867 gcc_unreachable ();
9870 tree type = TREE_TYPE (args[0]);
9871 tree ctype = build_complex_type (type);
9872 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9873 args[0], args[1]);
9874 tree tgt = save_expr (call);
9875 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9876 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9877 call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9878 intres, args[2]);
9879 tgt = save_expr (call);
9880 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9881 tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9882 ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
9883 tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
9884 tree store
9885 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
9886 return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
9889 /* Fold a call to __builtin_FILE to a constant string. */
9891 static inline tree
9892 fold_builtin_FILE (location_t loc)
9894 if (const char *fname = LOCATION_FILE (loc))
9896 /* The documentation says this builtin is equivalent to the preprocessor
9897 __FILE__ macro so it appears appropriate to use the same file prefix
9898 mappings. */
9899 fname = remap_macro_filename (fname);
9900 return build_string_literal (fname);
9903 return build_string_literal ("");
9906 /* Fold a call to __builtin_FUNCTION to a constant string. */
9908 static inline tree
9909 fold_builtin_FUNCTION ()
9911 const char *name = "";
9913 if (current_function_decl)
9914 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9916 return build_string_literal (name);
9919 /* Fold a call to __builtin_LINE to an integer constant. */
9921 static inline tree
9922 fold_builtin_LINE (location_t loc, tree type)
9924 return build_int_cst (type, LOCATION_LINE (loc));
9927 /* Fold a call to built-in function FNDECL with 0 arguments.
9928 This function returns NULL_TREE if no simplification was possible. */
9930 static tree
9931 fold_builtin_0 (location_t loc, tree fndecl)
9933 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9934 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9935 switch (fcode)
9937 case BUILT_IN_FILE:
9938 return fold_builtin_FILE (loc);
9940 case BUILT_IN_FUNCTION:
9941 return fold_builtin_FUNCTION ();
9943 case BUILT_IN_LINE:
9944 return fold_builtin_LINE (loc, type);
9946 CASE_FLT_FN (BUILT_IN_INF):
9947 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9948 case BUILT_IN_INFD32:
9949 case BUILT_IN_INFD64:
9950 case BUILT_IN_INFD128:
9951 return fold_builtin_inf (loc, type, true);
9953 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9954 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9955 return fold_builtin_inf (loc, type, false);
9957 case BUILT_IN_CLASSIFY_TYPE:
9958 return fold_builtin_classify_type (NULL_TREE);
9960 case BUILT_IN_UNREACHABLE:
9961 /* Rewrite any explicit calls to __builtin_unreachable. */
9962 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
9963 return build_builtin_unreachable (loc);
9964 break;
9966 default:
9967 break;
9969 return NULL_TREE;
9972 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9973 This function returns NULL_TREE if no simplification was possible. */
9975 static tree
9976 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9978 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9979 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9981 if (TREE_CODE (arg0) == ERROR_MARK)
9982 return NULL_TREE;
9984 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9985 return ret;
9987 switch (fcode)
9989 case BUILT_IN_CONSTANT_P:
9991 tree val = fold_builtin_constant_p (arg0);
9993 /* Gimplification will pull the CALL_EXPR for the builtin out of
9994 an if condition. When not optimizing, we'll not CSE it back.
9995 To avoid link error types of regressions, return false now. */
9996 if (!val && !optimize)
9997 val = integer_zero_node;
9999 return val;
10002 case BUILT_IN_CLASSIFY_TYPE:
10003 return fold_builtin_classify_type (arg0);
10005 case BUILT_IN_STRLEN:
10006 return fold_builtin_strlen (loc, expr, type, arg0);
10008 CASE_FLT_FN (BUILT_IN_FABS):
10009 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10010 case BUILT_IN_FABSD32:
10011 case BUILT_IN_FABSD64:
10012 case BUILT_IN_FABSD128:
10013 return fold_builtin_fabs (loc, arg0, type);
10015 case BUILT_IN_ABS:
10016 case BUILT_IN_LABS:
10017 case BUILT_IN_LLABS:
10018 case BUILT_IN_IMAXABS:
10019 return fold_builtin_abs (loc, arg0, type);
10021 CASE_FLT_FN (BUILT_IN_CONJ):
10022 if (validate_arg (arg0, COMPLEX_TYPE)
10023 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10024 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10025 break;
10027 CASE_FLT_FN (BUILT_IN_CREAL):
10028 if (validate_arg (arg0, COMPLEX_TYPE)
10029 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10030 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10031 break;
10033 CASE_FLT_FN (BUILT_IN_CIMAG):
10034 if (validate_arg (arg0, COMPLEX_TYPE)
10035 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10036 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10037 break;
10039 CASE_FLT_FN (BUILT_IN_CARG):
10040 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10041 return fold_builtin_carg (loc, arg0, type);
10043 case BUILT_IN_ISASCII:
10044 return fold_builtin_isascii (loc, arg0);
10046 case BUILT_IN_TOASCII:
10047 return fold_builtin_toascii (loc, arg0);
10049 case BUILT_IN_ISDIGIT:
10050 return fold_builtin_isdigit (loc, arg0);
10052 CASE_FLT_FN (BUILT_IN_FINITE):
10053 case BUILT_IN_FINITED32:
10054 case BUILT_IN_FINITED64:
10055 case BUILT_IN_FINITED128:
10056 case BUILT_IN_ISFINITE:
10058 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10059 if (ret)
10060 return ret;
10061 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10064 CASE_FLT_FN (BUILT_IN_ISINF):
10065 case BUILT_IN_ISINFD32:
10066 case BUILT_IN_ISINFD64:
10067 case BUILT_IN_ISINFD128:
10069 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10070 if (ret)
10071 return ret;
10072 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10075 case BUILT_IN_ISNORMAL:
10076 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10078 case BUILT_IN_ISINF_SIGN:
10079 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10081 CASE_FLT_FN (BUILT_IN_ISNAN):
10082 case BUILT_IN_ISNAND32:
10083 case BUILT_IN_ISNAND64:
10084 case BUILT_IN_ISNAND128:
10085 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10087 case BUILT_IN_ISSIGNALING:
10088 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10090 case BUILT_IN_FREE:
10091 if (integer_zerop (arg0))
10092 return build_empty_stmt (loc);
10093 break;
10095 case BUILT_IN_CLZG:
10096 case BUILT_IN_CTZG:
10097 case BUILT_IN_CLRSBG:
10098 case BUILT_IN_FFSG:
10099 case BUILT_IN_PARITYG:
10100 case BUILT_IN_POPCOUNTG:
10101 return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10103 default:
10104 break;
10107 return NULL_TREE;
10111 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10112 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10113 if no simplification was possible. */
10115 static tree
10116 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10118 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10119 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10121 if (TREE_CODE (arg0) == ERROR_MARK
10122 || TREE_CODE (arg1) == ERROR_MARK)
10123 return NULL_TREE;
10125 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10126 return ret;
10128 switch (fcode)
10130 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10131 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10132 if (validate_arg (arg0, REAL_TYPE)
10133 && validate_arg (arg1, POINTER_TYPE))
10134 return do_mpfr_lgamma_r (arg0, arg1, type);
10135 break;
10137 CASE_FLT_FN (BUILT_IN_FREXP):
10138 return fold_builtin_frexp (loc, arg0, arg1, type);
10140 CASE_FLT_FN (BUILT_IN_MODF):
10141 return fold_builtin_modf (loc, arg0, arg1, type);
10143 case BUILT_IN_STRSPN:
10144 return fold_builtin_strspn (loc, expr, arg0, arg1);
10146 case BUILT_IN_STRCSPN:
10147 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10149 case BUILT_IN_STRPBRK:
10150 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10152 case BUILT_IN_EXPECT:
10153 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10155 case BUILT_IN_ISGREATER:
10156 return fold_builtin_unordered_cmp (loc, fndecl,
10157 arg0, arg1, UNLE_EXPR, LE_EXPR);
10158 case BUILT_IN_ISGREATEREQUAL:
10159 return fold_builtin_unordered_cmp (loc, fndecl,
10160 arg0, arg1, UNLT_EXPR, LT_EXPR);
10161 case BUILT_IN_ISLESS:
10162 return fold_builtin_unordered_cmp (loc, fndecl,
10163 arg0, arg1, UNGE_EXPR, GE_EXPR);
10164 case BUILT_IN_ISLESSEQUAL:
10165 return fold_builtin_unordered_cmp (loc, fndecl,
10166 arg0, arg1, UNGT_EXPR, GT_EXPR);
10167 case BUILT_IN_ISLESSGREATER:
10168 return fold_builtin_unordered_cmp (loc, fndecl,
10169 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10170 case BUILT_IN_ISUNORDERED:
10171 return fold_builtin_unordered_cmp (loc, fndecl,
10172 arg0, arg1, UNORDERED_EXPR,
10173 NOP_EXPR);
10175 case BUILT_IN_ISEQSIG:
10176 return fold_builtin_iseqsig (loc, arg0, arg1);
10178 /* We do the folding for va_start in the expander. */
10179 case BUILT_IN_VA_START:
10180 break;
10182 case BUILT_IN_OBJECT_SIZE:
10183 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10184 return fold_builtin_object_size (arg0, arg1, fcode);
10186 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10187 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10189 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10190 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10192 case BUILT_IN_CLZG:
10193 case BUILT_IN_CTZG:
10194 return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10196 default:
10197 break;
10199 return NULL_TREE;
10202 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10203 and ARG2.
10204 This function returns NULL_TREE if no simplification was possible. */
10206 static tree
10207 fold_builtin_3 (location_t loc, tree fndecl,
10208 tree arg0, tree arg1, tree arg2)
10210 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10211 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10213 if (TREE_CODE (arg0) == ERROR_MARK
10214 || TREE_CODE (arg1) == ERROR_MARK
10215 || TREE_CODE (arg2) == ERROR_MARK)
10216 return NULL_TREE;
10218 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10219 arg0, arg1, arg2))
10220 return ret;
10222 switch (fcode)
10225 CASE_FLT_FN (BUILT_IN_SINCOS):
10226 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10228 CASE_FLT_FN (BUILT_IN_REMQUO):
10229 if (validate_arg (arg0, REAL_TYPE)
10230 && validate_arg (arg1, REAL_TYPE)
10231 && validate_arg (arg2, POINTER_TYPE))
10232 return do_mpfr_remquo (arg0, arg1, arg2);
10233 break;
10235 case BUILT_IN_MEMCMP:
10236 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10238 case BUILT_IN_EXPECT:
10239 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10241 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10242 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10244 case BUILT_IN_ADD_OVERFLOW:
10245 case BUILT_IN_SUB_OVERFLOW:
10246 case BUILT_IN_MUL_OVERFLOW:
10247 case BUILT_IN_ADD_OVERFLOW_P:
10248 case BUILT_IN_SUB_OVERFLOW_P:
10249 case BUILT_IN_MUL_OVERFLOW_P:
10250 case BUILT_IN_SADD_OVERFLOW:
10251 case BUILT_IN_SADDL_OVERFLOW:
10252 case BUILT_IN_SADDLL_OVERFLOW:
10253 case BUILT_IN_SSUB_OVERFLOW:
10254 case BUILT_IN_SSUBL_OVERFLOW:
10255 case BUILT_IN_SSUBLL_OVERFLOW:
10256 case BUILT_IN_SMUL_OVERFLOW:
10257 case BUILT_IN_SMULL_OVERFLOW:
10258 case BUILT_IN_SMULLL_OVERFLOW:
10259 case BUILT_IN_UADD_OVERFLOW:
10260 case BUILT_IN_UADDL_OVERFLOW:
10261 case BUILT_IN_UADDLL_OVERFLOW:
10262 case BUILT_IN_USUB_OVERFLOW:
10263 case BUILT_IN_USUBL_OVERFLOW:
10264 case BUILT_IN_USUBLL_OVERFLOW:
10265 case BUILT_IN_UMUL_OVERFLOW:
10266 case BUILT_IN_UMULL_OVERFLOW:
10267 case BUILT_IN_UMULLL_OVERFLOW:
10268 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10270 default:
10271 break;
10273 return NULL_TREE;
10276 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10277 ARGS is an array of NARGS arguments. IGNORE is true if the result
10278 of the function call is ignored. This function returns NULL_TREE
10279 if no simplification was possible. */
10281 static tree
10282 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10283 int nargs, bool)
10285 tree ret = NULL_TREE;
10287 switch (nargs)
10289 case 0:
10290 ret = fold_builtin_0 (loc, fndecl);
10291 break;
10292 case 1:
10293 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10294 break;
10295 case 2:
10296 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10297 break;
10298 case 3:
10299 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10300 break;
10301 default:
10302 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10303 break;
10305 if (ret)
10307 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10308 SET_EXPR_LOCATION (ret, loc);
10309 return ret;
10311 return NULL_TREE;
10314 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10315 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10316 of arguments in ARGS to be omitted. OLDNARGS is the number of
10317 elements in ARGS. */
10319 static tree
10320 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10321 int skip, tree fndecl, int n, va_list newargs)
10323 int nargs = oldnargs - skip + n;
10324 tree *buffer;
10326 if (n > 0)
10328 int i, j;
10330 buffer = XALLOCAVEC (tree, nargs);
10331 for (i = 0; i < n; i++)
10332 buffer[i] = va_arg (newargs, tree);
10333 for (j = skip; j < oldnargs; j++, i++)
10334 buffer[i] = args[j];
10336 else
10337 buffer = args + skip;
10339 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10342 /* Return true if FNDECL shouldn't be folded right now.
10343 If a built-in function has an inline attribute always_inline
10344 wrapper, defer folding it until after always_inline functions have
10345 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10346 might not be performed. */
10348 bool
10349 avoid_folding_inline_builtin (tree fndecl)
10351 return (DECL_DECLARED_INLINE_P (fndecl)
10352 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10353 && cfun
10354 && !cfun->always_inline_functions_inlined
10355 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10358 /* A wrapper function for builtin folding that prevents warnings for
10359 "statement without effect" and the like, caused by removing the
10360 call node earlier than the warning is generated. */
10362 tree
10363 fold_call_expr (location_t loc, tree exp, bool ignore)
10365 tree ret = NULL_TREE;
10366 tree fndecl = get_callee_fndecl (exp);
10367 if (fndecl && fndecl_built_in_p (fndecl)
10368 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10369 yet. Defer folding until we see all the arguments
10370 (after inlining). */
10371 && !CALL_EXPR_VA_ARG_PACK (exp))
10373 int nargs = call_expr_nargs (exp);
10375 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10376 instead last argument is __builtin_va_arg_pack (). Defer folding
10377 even in that case, until arguments are finalized. */
10378 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10380 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10381 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10382 return NULL_TREE;
10385 if (avoid_folding_inline_builtin (fndecl))
10386 return NULL_TREE;
10388 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10389 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10390 CALL_EXPR_ARGP (exp), ignore);
10391 else
10393 tree *args = CALL_EXPR_ARGP (exp);
10394 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10395 if (ret)
10396 return ret;
10399 return NULL_TREE;
10402 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10403 N arguments are passed in the array ARGARRAY. Return a folded
10404 expression or NULL_TREE if no simplification was possible. */
10406 tree
10407 fold_builtin_call_array (location_t loc, tree,
10408 tree fn,
10409 int n,
10410 tree *argarray)
10412 if (TREE_CODE (fn) != ADDR_EXPR)
10413 return NULL_TREE;
10415 tree fndecl = TREE_OPERAND (fn, 0);
10416 if (TREE_CODE (fndecl) == FUNCTION_DECL
10417 && fndecl_built_in_p (fndecl))
10419 /* If last argument is __builtin_va_arg_pack (), arguments to this
10420 function are not finalized yet. Defer folding until they are. */
10421 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10423 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10424 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10425 return NULL_TREE;
10427 if (avoid_folding_inline_builtin (fndecl))
10428 return NULL_TREE;
10429 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10430 return targetm.fold_builtin (fndecl, n, argarray, false);
10431 else
10432 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10435 return NULL_TREE;
10438 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10439 along with N new arguments specified as the "..." parameters. SKIP
10440 is the number of arguments in EXP to be omitted. This function is used
10441 to do varargs-to-varargs transformations. */
10443 static tree
10444 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10446 va_list ap;
10447 tree t;
10449 va_start (ap, n);
10450 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10451 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10452 va_end (ap);
10454 return t;
10457 /* Validate a single argument ARG against a tree code CODE representing
10458 a type. Return true when argument is valid. */
10460 static bool
10461 validate_arg (const_tree arg, enum tree_code code)
10463 if (!arg)
10464 return false;
10465 else if (code == POINTER_TYPE)
10466 return POINTER_TYPE_P (TREE_TYPE (arg));
10467 else if (code == INTEGER_TYPE)
10468 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10469 return code == TREE_CODE (TREE_TYPE (arg));
10472 /* This function validates the types of a function call argument list
10473 against a specified list of tree_codes. If the last specifier is a 0,
10474 that represents an ellipsis, otherwise the last specifier must be a
10475 VOID_TYPE.
10477 This is the GIMPLE version of validate_arglist. Eventually we want to
10478 completely convert builtins.cc to work from GIMPLEs and the tree based
10479 validate_arglist will then be removed. */
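/* For instance, a caller checking a (void *, int) argument list could use
   validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   (an illustrative use, not a reference to a specific caller). */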
10481 bool
10482 validate_gimple_arglist (const gcall *call, ...)
10484 enum tree_code code;
10485 bool res = 0;
10486 va_list ap;
10487 const_tree arg;
10488 size_t i;
10490 va_start (ap, call);
10491 i = 0;
10495 code = (enum tree_code) va_arg (ap, int);
10496 switch (code)
10498 case 0:
10499 /* This signifies an ellipsis; any further arguments are all ok. */
10500 res = true;
10501 goto end;
10502 case VOID_TYPE:
10503 /* This signifies an endlink; if no arguments remain, return
10504 true, otherwise return false. */
10505 res = (i == gimple_call_num_args (call));
10506 goto end;
10507 default:
10508 /* If no parameters remain or the parameter's code does not
10509 match the specified code, return false. Otherwise continue
10510 checking any remaining arguments. */
10511 arg = gimple_call_arg (call, i++);
10512 if (!validate_arg (arg, code))
10513 goto end;
10514 break;
10517 while (1);
10519 /* We need gotos here since we can only have one VA_CLOSE in a
10520 function. */
10521 end: ;
10522 va_end (ap);
10524 return res;
10527 /* Default target-specific builtin expander that does nothing. */
10529 rtx
10530 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10531 rtx target ATTRIBUTE_UNUSED,
10532 rtx subtarget ATTRIBUTE_UNUSED,
10533 machine_mode mode ATTRIBUTE_UNUSED,
10534 int ignore ATTRIBUTE_UNUSED)
10536 return NULL_RTX;
10539 /* Returns true if EXP represents data that would potentially reside
10540 in a readonly section. */
10542 bool
10543 readonly_data_expr (tree exp)
10545 STRIP_NOPS (exp);
10547 if (TREE_CODE (exp) != ADDR_EXPR)
10548 return false;
10550 exp = get_base_address (TREE_OPERAND (exp, 0));
10551 if (!exp)
10552 return false;
10554 /* Make sure we call decl_readonly_section only for trees it
10555 can handle (since it returns true for everything it doesn't
10556 understand). */
10557 if (TREE_CODE (exp) == STRING_CST
10558 || TREE_CODE (exp) == CONSTRUCTOR
10559 || (VAR_P (exp) && TREE_STATIC (exp)))
10560 return decl_readonly_section (exp, 0);
10561 else
10562 return false;
10565 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10566 to the call, and TYPE is its return type.
10568 Return NULL_TREE if no simplification was possible, otherwise return the
10569 simplified form of the call as a tree.
10571 The simplified form may be a constant or other expression which
10572 computes the same value, but in a more efficient manner (including
10573 calls to other builtin functions).
10575 The call may contain arguments which need to be evaluated, but
10576 which are not useful to determine the result of the call. In
10577 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10578 COMPOUND_EXPR will be an argument which must be evaluated.
10579 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10580 COMPOUND_EXPR in the chain will contain the tree for the simplified
10581 form of the builtin function call. */
10583 static tree
10584 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
10586 if (!validate_arg (s1, POINTER_TYPE)
10587 || !validate_arg (s2, POINTER_TYPE))
10588 return NULL_TREE;
10590 tree fn;
10591 const char *p1, *p2;
10593 p2 = c_getstr (s2);
10594 if (p2 == NULL)
10595 return NULL_TREE;
10597 p1 = c_getstr (s1);
10598 if (p1 != NULL)
10600 const char *r = strpbrk (p1, p2);
10601 tree tem;
10603 if (r == NULL)
10604 return build_int_cst (TREE_TYPE (s1), 0);
10606 /* Return an offset into the constant string argument. */
10607 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10608 return fold_convert_loc (loc, type, tem);
10611 if (p2[0] == '\0')
10612 /* strpbrk(x, "") == NULL.
10613 Evaluate and ignore s1 in case it had side-effects. */
10614 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10616 if (p2[1] != '\0')
10617 return NULL_TREE; /* Really call strpbrk. */
10619 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10620 if (!fn)
10621 return NULL_TREE;
10623 /* New argument list transforming strpbrk(s1, s2) to
10624 strchr(s1, s2[0]). */
10625 return build_call_expr_loc (loc, fn, 2, s1,
10626 build_int_cst (integer_type_node, p2[0]));
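/* Editorial illustration, not part of the upstream sources: with a
   constant second argument the folding above behaves roughly as

     strpbrk (s, "")      ->  ((void) s, (char *) 0)   (empty accept set)
     strpbrk (s, "x")     ->  strchr (s, 'x')          (single character)
     strpbrk ("ab", "b")  ->  "ab" + 1                 (both constant)

   anything else is left as a real call to strpbrk.  */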
10629 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10630 to the call.
10632 Return NULL_TREE if no simplification was possible, otherwise return the
10633 simplified form of the call as a tree.
10635 The simplified form may be a constant or other expression which
10636 computes the same value, but in a more efficient manner (including
10637 calls to other builtin functions).
10639 The call may contain arguments which need to be evaluated, but
10640 which are not useful to determine the result of the call. In
10641 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10642 COMPOUND_EXPR will be an argument which must be evaluated.
10643 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10644 COMPOUND_EXPR in the chain will contain the tree for the simplified
10645 form of the builtin function call. */
10647 static tree
10648 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10650 if (!validate_arg (s1, POINTER_TYPE)
10651 || !validate_arg (s2, POINTER_TYPE))
10652 return NULL_TREE;
10654 if (!check_nul_terminated_array (expr, s1)
10655 || !check_nul_terminated_array (expr, s2))
10656 return NULL_TREE;
10658 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10660 /* If either argument is "", return NULL_TREE. */
10661 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10662 /* Evaluate and ignore both arguments in case either one has
10663 side-effects. */
10664 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10665 s1, s2);
10666 return NULL_TREE;
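/* Editorial illustration, not part of the upstream sources: the only
   simplification made above is for a known-empty string on either side,
   e.g. strspn (s1, "") and strspn ("", s2) both fold to (size_t) 0 while
   still evaluating both arguments for their side effects.  */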
10669 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10670 to the call.
10672 Return NULL_TREE if no simplification was possible, otherwise return the
10673 simplified form of the call as a tree.
10675 The simplified form may be a constant or other expression which
10676 computes the same value, but in a more efficient manner (including
10677 calls to other builtin functions).
10679 The call may contain arguments which need to be evaluated, but
10680 which are not useful to determine the result of the call. In
10681 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10682 COMPOUND_EXPR will be an argument which must be evaluated.
10683 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10684 COMPOUND_EXPR in the chain will contain the tree for the simplified
10685 form of the builtin function call. */
10687 static tree
10688 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10690 if (!validate_arg (s1, POINTER_TYPE)
10691 || !validate_arg (s2, POINTER_TYPE))
10692 return NULL_TREE;
10694 if (!check_nul_terminated_array (expr, s1)
10695 || !check_nul_terminated_array (expr, s2))
10696 return NULL_TREE;
10698 /* If the first argument is "", return NULL_TREE. */
10699 const char *p1 = c_getstr (s1);
10700 if (p1 && *p1 == '\0')
10702 /* Evaluate and ignore argument s2 in case it has
10703 side-effects. */
10704 return omit_one_operand_loc (loc, size_type_node,
10705 size_zero_node, s2);
10708 /* If the second argument is "", return __builtin_strlen(s1). */
10709 const char *p2 = c_getstr (s2);
10710 if (p2 && *p2 == '\0')
10712 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10714 /* If the replacement _DECL isn't initialized, don't do the
10715 transformation. */
10716 if (!fn)
10717 return NULL_TREE;
10719 return build_call_expr_loc (loc, fn, 1, s1);
10721 return NULL_TREE;
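/* Editorial illustration, not part of the upstream sources:

     strcspn ("", s)  ->  0            (s still evaluated)
     strcspn (s, "")  ->  strlen (s)

   any other combination is left for the library call.  */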
10724 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10725 produced, false otherwise. This is done so that we don't output the error
10726 or warning twice or three times. */
10728 bool
10729 fold_builtin_next_arg (tree exp, bool va_start_p)
10731 tree fntype = TREE_TYPE (current_function_decl);
10732 int nargs = call_expr_nargs (exp);
10733 tree arg;
10734 /* There is a good chance the current input_location points inside the
10735 definition of the va_start macro (perhaps on the token for the
10736 builtin) in a system header, so warnings will not be emitted there.
10737 Use the location in real source code instead. */
10738 location_t current_location =
10739 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10740 NULL);
10742 if (!stdarg_p (fntype))
10744 error ("%<va_start%> used in function with fixed arguments");
10745 return true;
10748 if (va_start_p)
10750 if (va_start_p && (nargs != 2))
10752 error ("wrong number of arguments to function %<va_start%>");
10753 return true;
10755 arg = CALL_EXPR_ARG (exp, 1);
10757 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10758 when we checked the arguments and if needed issued a warning. */
10759 else
10761 if (nargs == 0)
10763 /* Evidently an out of date version of <stdarg.h>; can't validate
10764 va_start's second argument, but can still work as intended. */
10765 warning_at (current_location,
10766 OPT_Wvarargs,
10767 "%<__builtin_next_arg%> called without an argument");
10768 return true;
10770 else if (nargs > 1)
10772 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10773 return true;
10775 arg = CALL_EXPR_ARG (exp, 0);
10778 if (TREE_CODE (arg) == SSA_NAME
10779 && SSA_NAME_VAR (arg))
10780 arg = SSA_NAME_VAR (arg);
10782 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10783 or __builtin_next_arg (0) the first time we see it, after checking
10784 the arguments and if needed issuing a warning. */
10785 if (!integer_zerop (arg))
10787 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10789 /* Strip off all nops for the sake of the comparison. This
10790 is not quite the same as STRIP_NOPS. It does more.
10791 We must also strip off INDIRECT_EXPR for C++ reference
10792 parameters. */
10793 while (CONVERT_EXPR_P (arg)
10794 || INDIRECT_REF_P (arg))
10795 arg = TREE_OPERAND (arg, 0);
10796 if (arg != last_parm)
10798 /* FIXME: Sometimes with the tree optimizers we can end up with
10799 something other than the last argument even though the user did
10800 use the last argument. We just warn and continue, so the
10801 generated code will be wrong because of
10802 it. */
10803 warning_at (current_location,
10804 OPT_Wvarargs,
10805 "second parameter of %<va_start%> not last named argument");
10808 /* Undefined by C99 7.15.1.4p4 (va_start):
10809 "If the parameter parmN is declared with the register storage
10810 class, with a function or array type, or with a type that is
10811 not compatible with the type that results after application of
10812 the default argument promotions, the behavior is undefined."
10814 else if (DECL_REGISTER (arg))
10816 warning_at (current_location,
10817 OPT_Wvarargs,
10818 "undefined behavior when second parameter of "
10819 "%<va_start%> is declared with %<register%> storage");
10822 /* We want to verify the second parameter just once before the tree
10823 optimizers are run and then avoid keeping it in the tree,
10824 as otherwise we could warn even for correct code like:
10825 void foo (int i, ...)
10826 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10827 if (va_start_p)
10828 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10829 else
10830 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10832 return false;
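/* Editorial illustration, not part of the upstream sources: the check
   above is what diagnoses code such as

     void f (int a, int b, ...)
     {
       __builtin_va_list ap;
       __builtin_va_start (ap, a);   (warns: second parameter of va_start
                                      not last named argument)
     }

   while va_start (ap, b) is accepted; in either case the second argument
   is then replaced by zero so later passes do not repeat the check.  */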
10836 /* Expand a call EXP to __builtin_object_size. */
10838 static rtx
10839 expand_builtin_object_size (tree exp)
10841 tree ost;
10842 int object_size_type;
10843 tree fndecl = get_callee_fndecl (exp);
10845 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10847 error ("first argument of %qD must be a pointer, second integer constant",
10848 fndecl);
10849 expand_builtin_trap ();
10850 return const0_rtx;
10853 ost = CALL_EXPR_ARG (exp, 1);
10854 STRIP_NOPS (ost);
10856 if (TREE_CODE (ost) != INTEGER_CST
10857 || tree_int_cst_sgn (ost) < 0
10858 || compare_tree_int (ost, 3) > 0)
10860 error ("last argument of %qD is not integer constant between 0 and 3",
10861 fndecl);
10862 expand_builtin_trap ();
10863 return const0_rtx;
10866 object_size_type = tree_to_shwi (ost);
10868 return object_size_type < 2 ? constm1_rtx : const0_rtx;
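/* Editorial note, not part of the upstream sources: reaching this point
   means earlier passes could not fold the call, so the expansion falls
   back to the documented "unknown" results: __builtin_object_size (p, 0)
   and (p, 1) become (size_t) -1, while types 2 and 3 become 0.  */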
10871 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10872 FCODE is the BUILT_IN_* to use.
10873 Return NULL_RTX if we failed; the caller should emit a normal call,
10874 otherwise try to get the result in TARGET, if convenient (and in
10875 mode MODE if that's convenient). */
10877 static rtx
10878 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10879 enum built_in_function fcode)
10881 if (!validate_arglist (exp,
10882 POINTER_TYPE,
10883 fcode == BUILT_IN_MEMSET_CHK
10884 ? INTEGER_TYPE : POINTER_TYPE,
10885 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10886 return NULL_RTX;
10888 tree dest = CALL_EXPR_ARG (exp, 0);
10889 tree src = CALL_EXPR_ARG (exp, 1);
10890 tree len = CALL_EXPR_ARG (exp, 2);
10891 tree size = CALL_EXPR_ARG (exp, 3);
10893 /* FIXME: Set access mode to write only for memset et al. */
10894 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10895 /*srcstr=*/NULL_TREE, size, access_read_write);
10897 if (!tree_fits_uhwi_p (size))
10898 return NULL_RTX;
10900 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10902 /* Avoid transforming the checking call to an ordinary one when
10903 an overflow has been detected or when the call couldn't be
10904 validated because the size is not constant. */
10905 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10906 return NULL_RTX;
10908 tree fn = NULL_TREE;
10909 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10910 mem{cpy,pcpy,move,set} is available. */
10911 switch (fcode)
10913 case BUILT_IN_MEMCPY_CHK:
10914 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10915 break;
10916 case BUILT_IN_MEMPCPY_CHK:
10917 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10918 break;
10919 case BUILT_IN_MEMMOVE_CHK:
10920 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10921 break;
10922 case BUILT_IN_MEMSET_CHK:
10923 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10924 break;
10925 default:
10926 break;
10929 if (! fn)
10930 return NULL_RTX;
10932 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10933 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10934 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10935 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10937 else if (fcode == BUILT_IN_MEMSET_CHK)
10938 return NULL_RTX;
10939 else
10941 unsigned int dest_align = get_pointer_alignment (dest);
10943 /* If DEST is not a pointer type, call the normal function. */
10944 if (dest_align == 0)
10945 return NULL_RTX;
10947 /* If SRC and DEST are the same (and not volatile), do nothing. */
10948 if (operand_equal_p (src, dest, 0))
10950 tree expr;
10952 if (fcode != BUILT_IN_MEMPCPY_CHK)
10954 /* Evaluate and ignore LEN in case it has side-effects. */
10955 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10956 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10959 expr = fold_build_pointer_plus (dest, len);
10960 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10963 /* __memmove_chk special case. */
10964 if (fcode == BUILT_IN_MEMMOVE_CHK)
10966 unsigned int src_align = get_pointer_alignment (src);
10968 if (src_align == 0)
10969 return NULL_RTX;
10971 /* If src is categorized for a readonly section we can use
10972 normal __memcpy_chk. */
10973 if (readonly_data_expr (src))
10975 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10976 if (!fn)
10977 return NULL_RTX;
10978 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10979 dest, src, len, size);
10980 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10981 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10982 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10985 return NULL_RTX;
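/* Editorial illustration, not part of the upstream sources (BUF and SRC
   are hypothetical names): for

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   the length and the (already folded) object size are constants with
   4 <= 8, so the code above rewrites the call to plain memcpy (buf, src, 4).
   If the constant length exceeded the object size, the checking call would
   be kept and check_access would have issued the overflow warning.  */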
10989 /* Emit warning if a buffer overflow is detected at compile time. */
10991 static void
10992 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10994 /* The source string. */
10995 tree srcstr = NULL_TREE;
10996 /* The size of the destination object returned by __builtin_object_size. */
10997 tree objsize = NULL_TREE;
10998 /* The string that the source is being appended to (as in __strcat_chk),
10999 or null if the call doesn't concatenate. */
11000 tree catstr = NULL_TREE;
11001 /* The maximum length of the source sequence in a bounded operation
11002 (such as __strncat_chk) or null if the operation isn't bounded
11003 (such as __strcat_chk). */
11004 tree maxread = NULL_TREE;
11005 /* The exact size of the access (such as in __strncpy_chk). */
11006 tree size = NULL_TREE;
11007 /* The access by the function that's checked. Except for snprintf,
11008 both writing and reading are checked. */
11009 access_mode mode = access_read_write;
11011 switch (fcode)
11013 case BUILT_IN_STRCPY_CHK:
11014 case BUILT_IN_STPCPY_CHK:
11015 srcstr = CALL_EXPR_ARG (exp, 1);
11016 objsize = CALL_EXPR_ARG (exp, 2);
11017 break;
11019 case BUILT_IN_STRCAT_CHK:
11020 /* For __strcat_chk the warning will be emitted only if overflowing
11021 by at least strlen (dest) + 1 bytes. */
11022 catstr = CALL_EXPR_ARG (exp, 0);
11023 srcstr = CALL_EXPR_ARG (exp, 1);
11024 objsize = CALL_EXPR_ARG (exp, 2);
11025 break;
11027 case BUILT_IN_STRNCAT_CHK:
11028 catstr = CALL_EXPR_ARG (exp, 0);
11029 srcstr = CALL_EXPR_ARG (exp, 1);
11030 maxread = CALL_EXPR_ARG (exp, 2);
11031 objsize = CALL_EXPR_ARG (exp, 3);
11032 break;
11034 case BUILT_IN_STRNCPY_CHK:
11035 case BUILT_IN_STPNCPY_CHK:
11036 srcstr = CALL_EXPR_ARG (exp, 1);
11037 size = CALL_EXPR_ARG (exp, 2);
11038 objsize = CALL_EXPR_ARG (exp, 3);
11039 break;
11041 case BUILT_IN_SNPRINTF_CHK:
11042 case BUILT_IN_VSNPRINTF_CHK:
11043 maxread = CALL_EXPR_ARG (exp, 1);
11044 objsize = CALL_EXPR_ARG (exp, 3);
11045 /* The only checked access is the write to the destination. */
11046 mode = access_write_only;
11047 break;
11048 default:
11049 gcc_unreachable ();
11052 if (catstr && maxread)
11054 /* Check __strncat_chk. There is no way to determine the length
11055 of the string to which the source string is being appended so
11056 just warn when the length of the source string is not known. */
11057 check_strncat_sizes (exp, objsize);
11058 return;
11061 check_access (exp, size, maxread, srcstr, objsize, mode);
11064 /* Emit warning if a buffer overflow is detected at compile time
11065 in __sprintf_chk/__vsprintf_chk calls. */
11067 static void
11068 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11070 tree size, len, fmt;
11071 const char *fmt_str;
11072 int nargs = call_expr_nargs (exp);
11074 /* Verify the required arguments in the original call. */
11076 if (nargs < 4)
11077 return;
11078 size = CALL_EXPR_ARG (exp, 2);
11079 fmt = CALL_EXPR_ARG (exp, 3);
11081 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11082 return;
11084 /* Check whether the format is a literal string constant. */
11085 fmt_str = c_getstr (fmt);
11086 if (fmt_str == NULL)
11087 return;
11089 if (!init_target_chars ())
11090 return;
11092 /* If the format doesn't contain % args or %%, we know its size. */
11093 if (strchr (fmt_str, target_percent) == 0)
11094 len = build_int_cstu (size_type_node, strlen (fmt_str));
11095 /* If the format is "%s" and the first ... argument is a string literal,
11096 we know it too. */
11097 else if (fcode == BUILT_IN_SPRINTF_CHK
11098 && strcmp (fmt_str, target_percent_s) == 0)
11100 tree arg;
11102 if (nargs < 5)
11103 return;
11104 arg = CALL_EXPR_ARG (exp, 4);
11105 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11106 return;
11108 len = c_strlen (arg, 1);
11109 if (!len || ! tree_fits_uhwi_p (len))
11110 return;
11112 else
11113 return;
11115 /* Add one for the terminating nul. */
11116 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11118 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11119 access_write_only);
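/* Editorial illustration, not part of the upstream sources (D is a
   hypothetical name and the object size is assumed already folded): for

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 0), "hello");

   the format contains no '%', so the length is strlen ("hello") + 1 = 6,
   which check_access compares against the destination size 4 and warns
   about the overflow.  */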
11122 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11123 if possible. */
11125 static tree
11126 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11128 tree bytes;
11129 int object_size_type;
11131 if (!validate_arg (ptr, POINTER_TYPE)
11132 || !validate_arg (ost, INTEGER_TYPE))
11133 return NULL_TREE;
11135 STRIP_NOPS (ost);
11137 if (TREE_CODE (ost) != INTEGER_CST
11138 || tree_int_cst_sgn (ost) < 0
11139 || compare_tree_int (ost, 3) > 0)
11140 return NULL_TREE;
11142 object_size_type = tree_to_shwi (ost);
11144 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11145 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11146 and (size_t) 0 for types 2 and 3. */
11147 if (TREE_SIDE_EFFECTS (ptr))
11148 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11150 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11151 object_size_type |= OST_DYNAMIC;
11153 if (TREE_CODE (ptr) == ADDR_EXPR)
11155 compute_builtin_object_size (ptr, object_size_type, &bytes);
11156 if ((object_size_type & OST_DYNAMIC)
11157 || int_fits_type_p (bytes, size_type_node))
11158 return fold_convert (size_type_node, bytes);
11160 else if (TREE_CODE (ptr) == SSA_NAME)
11162 /* If object size is not known yet, delay folding until
11163 later. Maybe subsequent passes will help determine
11164 it. */
11165 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11166 && ((object_size_type & OST_DYNAMIC)
11167 || int_fits_type_p (bytes, size_type_node)))
11168 return fold_convert (size_type_node, bytes);
11171 return NULL_TREE;
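/* Editorial illustration, not part of the upstream sources: for

     char buf[32];
     size_t n = __builtin_object_size (&buf, 0);

   the argument is an ADDR_EXPR, compute_builtin_object_size succeeds and
   the call folds to the constant 32.  An SSA_NAME pointer whose object is
   not known yet is deliberately left unfolded so later passes can retry.  */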
11174 /* Builtins with folding operations that operate on "..." arguments
11175 need special handling; we need to store the arguments in a convenient
11176 data structure before attempting any folding. Fortunately there are
11177 only a few builtins that fall into this category. FNDECL is the
11178 function, ARGS are its arguments and NARGS their number. */
11180 static tree
11181 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11183 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11184 tree ret = NULL_TREE;
11186 switch (fcode)
11188 case BUILT_IN_FPCLASSIFY:
11189 ret = fold_builtin_fpclassify (loc, args, nargs);
11190 break;
11192 case BUILT_IN_ADDC:
11193 case BUILT_IN_ADDCL:
11194 case BUILT_IN_ADDCLL:
11195 case BUILT_IN_SUBC:
11196 case BUILT_IN_SUBCL:
11197 case BUILT_IN_SUBCLL:
11198 return fold_builtin_addc_subc (loc, fcode, args);
11200 default:
11201 break;
11203 if (ret)
11205 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11206 SET_EXPR_LOCATION (ret, loc);
11207 suppress_warning (ret);
11208 return ret;
11210 return NULL_TREE;
11213 /* Initialize format string characters in the target charset. */
11215 bool
11216 init_target_chars (void)
11218 static bool init;
11219 if (!init)
11221 target_newline = lang_hooks.to_target_charset ('\n');
11222 target_percent = lang_hooks.to_target_charset ('%');
11223 target_c = lang_hooks.to_target_charset ('c');
11224 target_s = lang_hooks.to_target_charset ('s');
11225 if (target_newline == 0 || target_percent == 0 || target_c == 0
11226 || target_s == 0)
11227 return false;
11229 target_percent_c[0] = target_percent;
11230 target_percent_c[1] = target_c;
11231 target_percent_c[2] = '\0';
11233 target_percent_s[0] = target_percent;
11234 target_percent_s[1] = target_s;
11235 target_percent_s[2] = '\0';
11237 target_percent_s_newline[0] = target_percent;
11238 target_percent_s_newline[1] = target_s;
11239 target_percent_s_newline[2] = target_newline;
11240 target_percent_s_newline[3] = '\0';
11242 init = true;
11244 return true;
11247 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11248 and no overflow/underflow occurred. INEXACT is true if M was not
11249 exactly calculated. TYPE is the tree type for the result. This
11250 function assumes that you cleared the MPFR flags and then
11251 calculated M to see if anything subsequently set a flag prior to
11252 entering this function. Return NULL_TREE if any checks fail. */
11254 static tree
11255 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11257 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11258 overflow/underflow occurred. If -frounding-math, proceed iff the
11259 result of calling FUNC was exact. */
11260 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11261 && (!flag_rounding_math || !inexact))
11263 REAL_VALUE_TYPE rr;
11265 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11266 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11267 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11268 but the mpfr_t is not, then we underflowed in the
11269 conversion. */
11270 if (real_isfinite (&rr)
11271 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11273 REAL_VALUE_TYPE rmode;
11275 real_convert (&rmode, TYPE_MODE (type), &rr);
11276 /* Proceed iff the specified mode can hold the value. */
11277 if (real_identical (&rmode, &rr))
11278 return build_real (type, rmode);
11281 return NULL_TREE;
11284 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11285 number and no overflow/underflow occurred. INEXACT is true if M
11286 was not exactly calculated. TYPE is the tree type for the result.
11287 This function assumes that you cleared the MPFR flags and then
11288 calculated M to see if anything subsequently set a flag prior to
11289 entering this function. Return NULL_TREE if any checks fail; if
11290 FORCE_CONVERT is true, bypass the checks. */
11292 static tree
11293 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11295 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11296 overflow/underflow occurred. If -frounding-math, proceed iff the
11297 result of calling FUNC was exact. */
11298 if (force_convert
11299 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11300 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11301 && (!flag_rounding_math || !inexact)))
11303 REAL_VALUE_TYPE re, im;
11305 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11306 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11307 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11308 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11309 but the mpfr_t is not, then we underflowed in the
11310 conversion. */
11311 if (force_convert
11312 || (real_isfinite (&re) && real_isfinite (&im)
11313 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11314 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11316 REAL_VALUE_TYPE re_mode, im_mode;
11318 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11319 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11320 /* Proceed iff the specified mode can hold the value. */
11321 if (force_convert
11322 || (real_identical (&re_mode, &re)
11323 && real_identical (&im_mode, &im)))
11324 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11325 build_real (TREE_TYPE (type), im_mode));
11328 return NULL_TREE;
11331 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11332 the pointer *(ARG_QUO) and return the result. The type is taken
11333 from the type of ARG0 and is used for setting the precision of the
11334 calculation and results. */
11336 static tree
11337 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11339 tree const type = TREE_TYPE (arg0);
11340 tree result = NULL_TREE;
11342 STRIP_NOPS (arg0);
11343 STRIP_NOPS (arg1);
11345 /* To proceed, MPFR must exactly represent the target floating point
11346 format, which only happens when the target base equals two. */
11347 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11348 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11349 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11351 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11352 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11354 if (real_isfinite (ra0) && real_isfinite (ra1))
11356 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11357 const int prec = fmt->p;
11358 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11359 tree result_rem;
11360 long integer_quo;
11361 mpfr_t m0, m1;
11363 mpfr_inits2 (prec, m0, m1, NULL);
11364 mpfr_from_real (m0, ra0, MPFR_RNDN);
11365 mpfr_from_real (m1, ra1, MPFR_RNDN);
11366 mpfr_clear_flags ();
11367 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11368 /* Remquo is independent of the rounding mode, so pass
11369 inexact=0 to do_mpfr_ckconv(). */
11370 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11371 mpfr_clears (m0, m1, NULL);
11372 if (result_rem)
11374 /* MPFR calculates quo in the host's long so it may
11375 return more bits in quo than the target int can hold
11376 if sizeof(host long) > sizeof(target int). This can
11377 happen even for native compilers in LP64 mode. In
11378 these cases, modulo the quo value with the largest
11379 number that the target int can hold while leaving one
11380 bit for the sign. */
11381 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11382 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11384 /* Dereference the quo pointer argument. */
11385 arg_quo = build_fold_indirect_ref (arg_quo);
11386 /* Proceed iff a valid pointer type was passed in. */
11387 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11389 /* Set the value. */
11390 tree result_quo
11391 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11392 build_int_cst (TREE_TYPE (arg_quo),
11393 integer_quo));
11394 TREE_SIDE_EFFECTS (result_quo) = 1;
11395 /* Combine the quo assignment with the rem. */
11396 result = fold_build2 (COMPOUND_EXPR, type,
11397 result_quo, result_rem);
11398 suppress_warning (result, OPT_Wunused_value);
11399 result = non_lvalue (result);
11404 return result;
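/* Editorial illustration, not part of the upstream sources; the values
   follow from the IEEE remainder definition used by mpfr_remquo: folding
   remquo (5.0, 3.0, &q) with constant arguments yields the remainder -1.0
   and stores 2 in *q, combined into the COMPOUND_EXPR built above.  */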
11407 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11408 resulting value as a tree with type TYPE. The mpfr precision is
11409 set to the precision of TYPE. We assume that this mpfr function
11410 returns zero if the result could be calculated exactly within the
11411 requested precision. In addition, the integer pointer represented
11412 by ARG_SG will be dereferenced and set to the appropriate signgam
11413 (-1,1) value. */
11415 static tree
11416 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11418 tree result = NULL_TREE;
11420 STRIP_NOPS (arg);
11422 /* To proceed, MPFR must exactly represent the target floating point
11423 format, which only happens when the target base equals two. Also
11424 verify ARG is a constant and that ARG_SG is an int pointer. */
11425 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11426 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11427 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11428 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11430 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11432 /* In addition to NaN and Inf, the argument cannot be zero or a
11433 negative integer. */
11434 if (real_isfinite (ra)
11435 && ra->cl != rvc_zero
11436 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11438 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11439 const int prec = fmt->p;
11440 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11441 int inexact, sg;
11442 tree result_lg;
11444 auto_mpfr m (prec);
11445 mpfr_from_real (m, ra, MPFR_RNDN);
11446 mpfr_clear_flags ();
11447 inexact = mpfr_lgamma (m, &sg, m, rnd);
11448 result_lg = do_mpfr_ckconv (m, type, inexact);
11449 if (result_lg)
11451 tree result_sg;
11453 /* Dereference the arg_sg pointer argument. */
11454 arg_sg = build_fold_indirect_ref (arg_sg);
11455 /* Assign the signgam value into *arg_sg. */
11456 result_sg = fold_build2 (MODIFY_EXPR,
11457 TREE_TYPE (arg_sg), arg_sg,
11458 build_int_cst (TREE_TYPE (arg_sg), sg));
11459 TREE_SIDE_EFFECTS (result_sg) = 1;
11460 /* Combine the signgam assignment with the lgamma result. */
11461 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11462 result_sg, result_lg));
11467 return result;
11470 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11471 mpc function FUNC on it and return the resulting value as a tree
11472 with type TYPE. The mpfr precision is set to the precision of
11473 TYPE. We assume that function FUNC returns zero if the result
11474 could be calculated exactly within the requested precision. If
11475 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11476 in the arguments and/or results. */
11478 tree
11479 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11480 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11482 tree result = NULL_TREE;
11484 STRIP_NOPS (arg0);
11485 STRIP_NOPS (arg1);
11487 /* To proceed, MPFR must exactly represent the target floating point
11488 format, which only happens when the target base equals two. */
11489 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11490 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
11491 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11492 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
11493 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11495 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11496 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11497 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11498 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11500 if (do_nonfinite
11501 || (real_isfinite (re0) && real_isfinite (im0)
11502 && real_isfinite (re1) && real_isfinite (im1)))
11504 const struct real_format *const fmt =
11505 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11506 const int prec = fmt->p;
11507 const mpfr_rnd_t rnd = fmt->round_towards_zero
11508 ? MPFR_RNDZ : MPFR_RNDN;
11509 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11510 int inexact;
11511 mpc_t m0, m1;
11513 mpc_init2 (m0, prec);
11514 mpc_init2 (m1, prec);
11515 mpfr_from_real (mpc_realref (m0), re0, rnd);
11516 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11517 mpfr_from_real (mpc_realref (m1), re1, rnd);
11518 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11519 mpfr_clear_flags ();
11520 inexact = func (m0, m0, m1, crnd);
11521 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11522 mpc_clear (m0);
11523 mpc_clear (m1);
11527 return result;
11530 /* A wrapper function for builtin folding that prevents warnings for
11531 "statement without effect" and the like, caused by removing the
11532 call node earlier than the warning is generated. */
11534 tree
11535 fold_call_stmt (gcall *stmt, bool ignore)
11537 tree ret = NULL_TREE;
11538 tree fndecl = gimple_call_fndecl (stmt);
11539 location_t loc = gimple_location (stmt);
11540 if (fndecl && fndecl_built_in_p (fndecl)
11541 && !gimple_call_va_arg_pack_p (stmt))
11543 int nargs = gimple_call_num_args (stmt);
11544 tree *args = (nargs > 0
11545 ? gimple_call_arg_ptr (stmt, 0)
11546 : &error_mark_node);
11548 if (avoid_folding_inline_builtin (fndecl))
11549 return NULL_TREE;
11550 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11552 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11554 else
11556 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11557 if (ret)
11559 /* Propagate location information from original call to
11560 expansion of builtin. Otherwise things like
11561 maybe_emit_chk_warning, that operate on the expansion
11562 of a builtin, will use the wrong location information. */
11563 if (gimple_has_location (stmt))
11565 tree realret = ret;
11566 if (TREE_CODE (ret) == NOP_EXPR)
11567 realret = TREE_OPERAND (ret, 0);
11568 if (CAN_HAVE_LOCATION_P (realret)
11569 && !EXPR_HAS_LOCATION (realret))
11570 SET_EXPR_LOCATION (realret, loc);
11571 return realret;
11573 return ret;
11577 return NULL_TREE;
11580 /* Look up the function in builtin_decl that corresponds to DECL
11581 and set ASMSPEC as its user assembler name. DECL must be a
11582 function decl that declares a builtin. */
11584 void
11585 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11587 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11588 && asmspec != 0);
11590 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11591 set_user_assembler_name (builtin, asmspec);
11593 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11594 && INT_TYPE_SIZE < BITS_PER_WORD)
11596 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11597 set_user_assembler_libfunc ("ffs", asmspec);
11598 set_optab_libfunc (ffs_optab, mode, "ffs");
11602 /* Return true if DECL is a builtin that expands to a constant or similarly
11603 simple code. */
11604 bool
11605 is_simple_builtin (tree decl)
11607 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11608 switch (DECL_FUNCTION_CODE (decl))
11610 /* Builtins that expand to constants. */
11611 case BUILT_IN_CONSTANT_P:
11612 case BUILT_IN_EXPECT:
11613 case BUILT_IN_OBJECT_SIZE:
11614 case BUILT_IN_UNREACHABLE:
11615 /* Simple register moves or loads from stack. */
11616 case BUILT_IN_ASSUME_ALIGNED:
11617 case BUILT_IN_RETURN_ADDRESS:
11618 case BUILT_IN_EXTRACT_RETURN_ADDR:
11619 case BUILT_IN_FROB_RETURN_ADDR:
11620 case BUILT_IN_RETURN:
11621 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11622 case BUILT_IN_FRAME_ADDRESS:
11623 case BUILT_IN_VA_END:
11624 case BUILT_IN_STACK_SAVE:
11625 case BUILT_IN_STACK_RESTORE:
11626 case BUILT_IN_DWARF_CFA:
11627 /* Exception state returns or moves registers around. */
11628 case BUILT_IN_EH_FILTER:
11629 case BUILT_IN_EH_POINTER:
11630 case BUILT_IN_EH_COPY_VALUES:
11631 return true;
11633 default:
11634 return false;
11637 return false;
11640 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11641 most probably expanded inline into reasonably simple code. This is a
11642 superset of is_simple_builtin. */
11643 bool
11644 is_inexpensive_builtin (tree decl)
11646 if (!decl)
11647 return false;
11648 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11649 return true;
11650 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11651 switch (DECL_FUNCTION_CODE (decl))
11653 case BUILT_IN_ABS:
11654 CASE_BUILT_IN_ALLOCA:
11655 case BUILT_IN_BSWAP16:
11656 case BUILT_IN_BSWAP32:
11657 case BUILT_IN_BSWAP64:
11658 case BUILT_IN_BSWAP128:
11659 case BUILT_IN_CLZ:
11660 case BUILT_IN_CLZIMAX:
11661 case BUILT_IN_CLZL:
11662 case BUILT_IN_CLZLL:
11663 case BUILT_IN_CTZ:
11664 case BUILT_IN_CTZIMAX:
11665 case BUILT_IN_CTZL:
11666 case BUILT_IN_CTZLL:
11667 case BUILT_IN_FFS:
11668 case BUILT_IN_FFSIMAX:
11669 case BUILT_IN_FFSL:
11670 case BUILT_IN_FFSLL:
11671 case BUILT_IN_IMAXABS:
11672 case BUILT_IN_FINITE:
11673 case BUILT_IN_FINITEF:
11674 case BUILT_IN_FINITEL:
11675 case BUILT_IN_FINITED32:
11676 case BUILT_IN_FINITED64:
11677 case BUILT_IN_FINITED128:
11678 case BUILT_IN_FPCLASSIFY:
11679 case BUILT_IN_ISFINITE:
11680 case BUILT_IN_ISINF_SIGN:
11681 case BUILT_IN_ISINF:
11682 case BUILT_IN_ISINFF:
11683 case BUILT_IN_ISINFL:
11684 case BUILT_IN_ISINFD32:
11685 case BUILT_IN_ISINFD64:
11686 case BUILT_IN_ISINFD128:
11687 case BUILT_IN_ISNAN:
11688 case BUILT_IN_ISNANF:
11689 case BUILT_IN_ISNANL:
11690 case BUILT_IN_ISNAND32:
11691 case BUILT_IN_ISNAND64:
11692 case BUILT_IN_ISNAND128:
11693 case BUILT_IN_ISNORMAL:
11694 case BUILT_IN_ISGREATER:
11695 case BUILT_IN_ISGREATEREQUAL:
11696 case BUILT_IN_ISLESS:
11697 case BUILT_IN_ISLESSEQUAL:
11698 case BUILT_IN_ISLESSGREATER:
11699 case BUILT_IN_ISUNORDERED:
11700 case BUILT_IN_ISEQSIG:
11701 case BUILT_IN_VA_ARG_PACK:
11702 case BUILT_IN_VA_ARG_PACK_LEN:
11703 case BUILT_IN_VA_COPY:
11704 case BUILT_IN_TRAP:
11705 case BUILT_IN_UNREACHABLE_TRAP:
11706 case BUILT_IN_SAVEREGS:
11707 case BUILT_IN_POPCOUNTL:
11708 case BUILT_IN_POPCOUNTLL:
11709 case BUILT_IN_POPCOUNTIMAX:
11710 case BUILT_IN_POPCOUNT:
11711 case BUILT_IN_PARITYL:
11712 case BUILT_IN_PARITYLL:
11713 case BUILT_IN_PARITYIMAX:
11714 case BUILT_IN_PARITY:
11715 case BUILT_IN_LABS:
11716 case BUILT_IN_LLABS:
11717 case BUILT_IN_PREFETCH:
11718 case BUILT_IN_ACC_ON_DEVICE:
11719 return true;
11721 default:
11722 return is_simple_builtin (decl);
11725 return false;
11728 /* Return true if T is a constant and the value cast to a target char
11729 can be represented by a host char.
11730 Store the cast char constant in *P if so. */
11732 bool
11733 target_char_cst_p (tree t, char *p)
11735 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11736 return false;
11738 *p = (char)tree_to_uhwi (t);
11739 return true;
11742 /* Return true if the builtin DECL is implemented in a standard library.
11743 Otherwise return false, which doesn't guarantee it is not (thus the list
11744 of handled builtins below may be incomplete). */
11746 bool
11747 builtin_with_linkage_p (tree decl)
11749 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11750 switch (DECL_FUNCTION_CODE (decl))
11752 CASE_FLT_FN (BUILT_IN_ACOS):
11753 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
11754 CASE_FLT_FN (BUILT_IN_ACOSH):
11755 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
11756 CASE_FLT_FN (BUILT_IN_ASIN):
11757 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
11758 CASE_FLT_FN (BUILT_IN_ASINH):
11759 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
11760 CASE_FLT_FN (BUILT_IN_ATAN):
11761 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
11762 CASE_FLT_FN (BUILT_IN_ATANH):
11763 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
11764 CASE_FLT_FN (BUILT_IN_ATAN2):
11765 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
11766 CASE_FLT_FN (BUILT_IN_CBRT):
11767 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
11768 CASE_FLT_FN (BUILT_IN_CEIL):
11769 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11770 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11771 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11772 CASE_FLT_FN (BUILT_IN_COS):
11773 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
11774 CASE_FLT_FN (BUILT_IN_COSH):
11775 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
11776 CASE_FLT_FN (BUILT_IN_ERF):
11777 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
11778 CASE_FLT_FN (BUILT_IN_ERFC):
11779 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
11780 CASE_FLT_FN (BUILT_IN_EXP):
11781 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
11782 CASE_FLT_FN (BUILT_IN_EXP2):
11783 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
11784 CASE_FLT_FN (BUILT_IN_EXPM1):
11785 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
11786 CASE_FLT_FN (BUILT_IN_FABS):
11787 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11788 CASE_FLT_FN (BUILT_IN_FDIM):
11789 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
11790 CASE_FLT_FN (BUILT_IN_FLOOR):
11791 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11792 CASE_FLT_FN (BUILT_IN_FMA):
11793 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11794 CASE_FLT_FN (BUILT_IN_FMAX):
11795 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11796 CASE_FLT_FN (BUILT_IN_FMIN):
11797 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11798 CASE_FLT_FN (BUILT_IN_FMOD):
11799 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
11800 CASE_FLT_FN (BUILT_IN_FREXP):
11801 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
11802 CASE_FLT_FN (BUILT_IN_HYPOT):
11803 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
11804 CASE_FLT_FN (BUILT_IN_ILOGB):
11805 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
11806 CASE_FLT_FN (BUILT_IN_LDEXP):
11807 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
11808 CASE_FLT_FN (BUILT_IN_LGAMMA):
11809 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
11810 CASE_FLT_FN (BUILT_IN_LLRINT):
11811 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
11812 CASE_FLT_FN (BUILT_IN_LLROUND):
11813 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
11814 CASE_FLT_FN (BUILT_IN_LOG):
11815 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
11816 CASE_FLT_FN (BUILT_IN_LOG10):
11817 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
11818 CASE_FLT_FN (BUILT_IN_LOG1P):
11819 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
11820 CASE_FLT_FN (BUILT_IN_LOG2):
11821 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
11822 CASE_FLT_FN (BUILT_IN_LOGB):
11823 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
11824 CASE_FLT_FN (BUILT_IN_LRINT):
11825 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
11826 CASE_FLT_FN (BUILT_IN_LROUND):
11827 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
11828 CASE_FLT_FN (BUILT_IN_MODF):
11829 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
11830 CASE_FLT_FN (BUILT_IN_NAN):
11831 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
11832 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11833 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11834 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11835 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
11836 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11837 CASE_FLT_FN (BUILT_IN_POW):
11838 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
11839 CASE_FLT_FN (BUILT_IN_REMAINDER):
11840 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
11841 CASE_FLT_FN (BUILT_IN_REMQUO):
11842 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
11843 CASE_FLT_FN (BUILT_IN_RINT):
11844 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11845 CASE_FLT_FN (BUILT_IN_ROUND):
11846 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11847 CASE_FLT_FN (BUILT_IN_SCALBLN):
11848 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
11849 CASE_FLT_FN (BUILT_IN_SCALBN):
11850 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
11851 CASE_FLT_FN (BUILT_IN_SIN):
11852 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
11853 CASE_FLT_FN (BUILT_IN_SINH):
11854 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
11855 CASE_FLT_FN (BUILT_IN_SINCOS):
11856 CASE_FLT_FN (BUILT_IN_SQRT):
11857 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11858 CASE_FLT_FN (BUILT_IN_TAN):
11859 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
11860 CASE_FLT_FN (BUILT_IN_TANH):
11861 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
11862 CASE_FLT_FN (BUILT_IN_TGAMMA):
11863 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
11864 CASE_FLT_FN (BUILT_IN_TRUNC):
11865 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11866 return true;
11868 case BUILT_IN_STPCPY:
11869 case BUILT_IN_STPNCPY:
11870 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11871 by libiberty's stpcpy.c for MinGW targets so we need to return true
11872 in order to be able to build libiberty in LTO mode for them. */
11873 return true;
11875 default:
11876 break;
11878 return false;
11881 /* Return true if OFFRNG is bounded to a subrange of offset values
11882 valid for the largest possible object. */
11884 bool
11885 access_ref::offset_bounded () const
11887 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11888 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11889 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
11892 /* Return the fnspec string describing the known side effects of builtin
11893 CALLEE. See tree-ssa-structalias.cc:find_func_aliases
11894 for the list of builtins we might need to handle here. */
11896 attr_fnspec
11897 builtin_fnspec (tree callee)
11899 built_in_function code = DECL_FUNCTION_CODE (callee);
11901 switch (code)
11903 /* All the following functions read memory pointed to by
11904 their second argument and write memory pointed to by first
11905 argument.
11906 strcat/strncat additionally reads memory pointed to by the first
11907 argument. */
11908 case BUILT_IN_STRCAT:
11909 case BUILT_IN_STRCAT_CHK:
11910 return "1cW 1 ";
11911 case BUILT_IN_STRNCAT:
11912 case BUILT_IN_STRNCAT_CHK:
11913 return "1cW 13";
11914 case BUILT_IN_STRCPY:
11915 case BUILT_IN_STRCPY_CHK:
11916 return "1cO 1 ";
11917 case BUILT_IN_STPCPY:
11918 case BUILT_IN_STPCPY_CHK:
11919 return ".cO 1 ";
11920 case BUILT_IN_STRNCPY:
11921 case BUILT_IN_MEMCPY:
11922 case BUILT_IN_MEMMOVE:
11923 case BUILT_IN_TM_MEMCPY:
11924 case BUILT_IN_TM_MEMMOVE:
11925 case BUILT_IN_STRNCPY_CHK:
11926 case BUILT_IN_MEMCPY_CHK:
11927 case BUILT_IN_MEMMOVE_CHK:
11928 return "1cO313";
11929 case BUILT_IN_MEMPCPY:
11930 case BUILT_IN_MEMPCPY_CHK:
11931 return ".cO313";
11932 case BUILT_IN_STPNCPY:
11933 case BUILT_IN_STPNCPY_CHK:
11934 return ".cO313";
11935 case BUILT_IN_BCOPY:
11936 return ".c23O3";
11937 case BUILT_IN_BZERO:
11938 return ".cO2";
11939 case BUILT_IN_MEMCMP:
11940 case BUILT_IN_MEMCMP_EQ:
11941 case BUILT_IN_BCMP:
11942 case BUILT_IN_STRNCMP:
11943 case BUILT_IN_STRNCMP_EQ:
11944 case BUILT_IN_STRNCASECMP:
11945 return ".cR3R3";
11947 /* The following functions read memory pointed to by their
11948 first argument. */
11949 CASE_BUILT_IN_TM_LOAD (1):
11950 CASE_BUILT_IN_TM_LOAD (2):
11951 CASE_BUILT_IN_TM_LOAD (4):
11952 CASE_BUILT_IN_TM_LOAD (8):
11953 CASE_BUILT_IN_TM_LOAD (FLOAT):
11954 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11955 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11956 CASE_BUILT_IN_TM_LOAD (M64):
11957 CASE_BUILT_IN_TM_LOAD (M128):
11958 CASE_BUILT_IN_TM_LOAD (M256):
11959 case BUILT_IN_TM_LOG:
11960 case BUILT_IN_TM_LOG_1:
11961 case BUILT_IN_TM_LOG_2:
11962 case BUILT_IN_TM_LOG_4:
11963 case BUILT_IN_TM_LOG_8:
11964 case BUILT_IN_TM_LOG_FLOAT:
11965 case BUILT_IN_TM_LOG_DOUBLE:
11966 case BUILT_IN_TM_LOG_LDOUBLE:
11967 case BUILT_IN_TM_LOG_M64:
11968 case BUILT_IN_TM_LOG_M128:
11969 case BUILT_IN_TM_LOG_M256:
11970 return ".cR ";
11972 case BUILT_IN_INDEX:
11973 case BUILT_IN_RINDEX:
11974 case BUILT_IN_STRCHR:
11975 case BUILT_IN_STRLEN:
11976 case BUILT_IN_STRRCHR:
11977 return ".cR ";
11978 case BUILT_IN_STRNLEN:
11979 return ".cR2";
11981 /* These read memory pointed to by the first argument.
11982 Allocating memory does not have any side-effects apart from
11983 being the definition point for the pointer.
11984 Unix98 specifies that errno is set on allocation failure. */
11985 case BUILT_IN_STRDUP:
11986 return "mCR ";
11987 case BUILT_IN_STRNDUP:
11988 return "mCR2";
11989 /* Allocating memory does not have any side-effects apart from
11990 being the definition point for the pointer. */
11991 case BUILT_IN_MALLOC:
11992 case BUILT_IN_ALIGNED_ALLOC:
11993 case BUILT_IN_CALLOC:
11994 case BUILT_IN_GOMP_ALLOC:
11995 return "mC";
11996 CASE_BUILT_IN_ALLOCA:
11997 return "mc";
11998 /* These read memory pointed to by the first argument with size
11999 in the third argument. */
12000 case BUILT_IN_MEMCHR:
12001 return ".cR3";
12002 /* These read memory pointed to by the first and second arguments. */
12003 case BUILT_IN_STRSTR:
12004 case BUILT_IN_STRPBRK:
12005 case BUILT_IN_STRCASECMP:
12006 case BUILT_IN_STRCSPN:
12007 case BUILT_IN_STRSPN:
12008 case BUILT_IN_STRCMP:
12009 case BUILT_IN_STRCMP_EQ:
12010 return ".cR R ";
12011 /* Freeing memory kills the pointed-to memory. More importantly
12012 the call has to serve as a barrier for moving loads and stores
12013 across it. */
12014 case BUILT_IN_STACK_RESTORE:
12015 case BUILT_IN_FREE:
12016 case BUILT_IN_GOMP_FREE:
12017 return ".co ";
12018 case BUILT_IN_VA_END:
12019 return ".cO ";
12020 /* Realloc serves both as allocation point and deallocation point. */
12021 case BUILT_IN_REALLOC:
12022 return ".Cw ";
12023 case BUILT_IN_GAMMA_R:
12024 case BUILT_IN_GAMMAF_R:
12025 case BUILT_IN_GAMMAL_R:
12026 case BUILT_IN_LGAMMA_R:
12027 case BUILT_IN_LGAMMAF_R:
12028 case BUILT_IN_LGAMMAL_R:
12029 return ".C. Ot";
12030 case BUILT_IN_FREXP:
12031 case BUILT_IN_FREXPF:
12032 case BUILT_IN_FREXPL:
12033 case BUILT_IN_MODF:
12034 case BUILT_IN_MODFF:
12035 case BUILT_IN_MODFL:
12036 return ".c. Ot";
12037 case BUILT_IN_REMQUO:
12038 case BUILT_IN_REMQUOF:
12039 case BUILT_IN_REMQUOL:
12040 return ".c. . Ot";
12041 case BUILT_IN_SINCOS:
12042 case BUILT_IN_SINCOSF:
12043 case BUILT_IN_SINCOSL:
12044 return ".c. OtOt";
12045 case BUILT_IN_MEMSET:
12046 case BUILT_IN_MEMSET_CHK:
12047 case BUILT_IN_TM_MEMSET:
12048 return "1cO3";
12049 CASE_BUILT_IN_TM_STORE (1):
12050 CASE_BUILT_IN_TM_STORE (2):
12051 CASE_BUILT_IN_TM_STORE (4):
12052 CASE_BUILT_IN_TM_STORE (8):
12053 CASE_BUILT_IN_TM_STORE (FLOAT):
12054 CASE_BUILT_IN_TM_STORE (DOUBLE):
12055 CASE_BUILT_IN_TM_STORE (LDOUBLE):
12056 CASE_BUILT_IN_TM_STORE (M64):
12057 CASE_BUILT_IN_TM_STORE (M128):
12058 CASE_BUILT_IN_TM_STORE (M256):
12059 return ".cO ";
12060 case BUILT_IN_STACK_SAVE:
12061 case BUILT_IN_RETURN:
12062 case BUILT_IN_EH_POINTER:
12063 case BUILT_IN_EH_FILTER:
12064 case BUILT_IN_UNWIND_RESUME:
12065 case BUILT_IN_CXA_END_CLEANUP:
12066 case BUILT_IN_EH_COPY_VALUES:
12067 case BUILT_IN_FRAME_ADDRESS:
12068 case BUILT_IN_APPLY_ARGS:
12069 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
12070 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
12071 case BUILT_IN_PREFETCH:
12072 case BUILT_IN_DWARF_CFA:
12073 case BUILT_IN_RETURN_ADDRESS:
12074 return ".c";
12075 case BUILT_IN_ASSUME_ALIGNED:
12076 case BUILT_IN_EXPECT:
12077 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12078 return "1cX ";
12079 /* But posix_memalign stores a pointer into the memory pointed to
12080 by its first argument. */
12081 case BUILT_IN_POSIX_MEMALIGN:
12082 return ".cOt";
12084 default:
12085 return "";