[i386] Fold __builtin_ia32_shufpd to VEC_PERM_EXPR
[official-gcc.git] / gcc / builtins.c
blob 3f32754c4d35fc34af7c53156d2a356f69a94a8f
1 /* Expand builtin functions.
2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
76 struct target_builtins default_target_builtins;
77 #if SWITCHABLE_TARGET
78 struct target_builtins *this_target_builtins = &default_target_builtins;
79 #endif
81 /* Define the names of the builtin function types and codes. */
82 const char *const built_in_class_names[BUILT_IN_LAST]
83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
85 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
86 const char * built_in_names[(int) END_BUILTINS] =
88 #include "builtins.def"
   91 /* Set up an array of builtin_info_type, making sure each element's decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info[(int)END_BUILTINS];
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p;
98 static int target_char_cast (tree, char *);
99 static rtx get_memory_rtx (tree, tree);
100 static int apply_args_size (void);
101 static int apply_result_size (void);
102 static rtx result_vector (int, rtx);
103 static void expand_builtin_prefetch (tree);
104 static rtx expand_builtin_apply_args (void);
105 static rtx expand_builtin_apply_args_1 (void);
106 static rtx expand_builtin_apply (rtx, rtx, rtx);
107 static void expand_builtin_return (rtx);
108 static enum type_class type_to_class (tree);
109 static rtx expand_builtin_classify_type (tree);
110 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
111 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
112 static rtx expand_builtin_interclass_mathfn (tree, rtx);
113 static rtx expand_builtin_sincos (tree);
114 static rtx expand_builtin_cexpi (tree, rtx);
115 static rtx expand_builtin_int_roundingfn (tree, rtx);
116 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
117 static rtx expand_builtin_next_arg (void);
118 static rtx expand_builtin_va_start (tree);
119 static rtx expand_builtin_va_end (tree);
120 static rtx expand_builtin_va_copy (tree);
121 static rtx inline_expand_builtin_string_cmp (tree, rtx);
122 static rtx expand_builtin_strcmp (tree, rtx);
123 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
124 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
125 static rtx expand_builtin_memchr (tree, rtx);
126 static rtx expand_builtin_memcpy (tree, rtx);
127 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
128 rtx target, tree exp,
129 memop_ret retmode);
130 static rtx expand_builtin_memmove (tree, rtx);
131 static rtx expand_builtin_mempcpy (tree, rtx);
132 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
133 static rtx expand_builtin_strcat (tree, rtx);
134 static rtx expand_builtin_strcpy (tree, rtx);
135 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
136 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
137 static rtx expand_builtin_stpncpy (tree, rtx);
138 static rtx expand_builtin_strncat (tree, rtx);
139 static rtx expand_builtin_strncpy (tree, rtx);
140 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
141 static rtx expand_builtin_memset (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
146 static rtx expand_builtin_alloca (tree);
147 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
148 static rtx expand_builtin_frame_address (tree, tree);
149 static tree stabilize_va_list_loc (location_t, tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static rtx expand_builtin_expect_with_probability (tree, rtx);
152 static tree fold_builtin_constant_p (tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (location_t, tree, tree);
155 static tree fold_builtin_inf (location_t, tree, int);
156 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
157 static bool validate_arg (const_tree, enum tree_code code);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
161 static tree fold_builtin_isascii (location_t, tree);
162 static tree fold_builtin_toascii (location_t, tree);
163 static tree fold_builtin_isdigit (location_t, tree);
164 static tree fold_builtin_fabs (location_t, tree, tree);
165 static tree fold_builtin_abs (location_t, tree, tree);
166 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
167 enum tree_code);
168 static tree fold_builtin_0 (location_t, tree);
169 static tree fold_builtin_1 (location_t, tree, tree);
170 static tree fold_builtin_2 (location_t, tree, tree, tree);
171 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
172 static tree fold_builtin_varargs (location_t, tree, tree*, int);
174 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
175 static tree fold_builtin_strspn (location_t, tree, tree);
176 static tree fold_builtin_strcspn (location_t, tree, tree);
178 static rtx expand_builtin_object_size (tree);
179 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
180 enum built_in_function);
181 static void maybe_emit_chk_warning (tree, enum built_in_function);
182 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_free_warning (tree);
184 static tree fold_builtin_object_size (tree, tree);
186 unsigned HOST_WIDE_INT target_newline;
187 unsigned HOST_WIDE_INT target_percent;
188 static unsigned HOST_WIDE_INT target_c;
189 static unsigned HOST_WIDE_INT target_s;
190 char target_percent_c[3];
191 char target_percent_s[3];
192 char target_percent_s_newline[4];
193 static tree do_mpfr_remquo (tree, tree, tree);
194 static tree do_mpfr_lgamma_r (tree, tree, tree);
195 static void expand_builtin_sync_synchronize (void);
  197 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
199 static bool
200 is_builtin_name (const char *name)
202 if (strncmp (name, "__builtin_", 10) == 0)
203 return true;
204 if (strncmp (name, "__sync_", 7) == 0)
205 return true;
206 if (strncmp (name, "__atomic_", 9) == 0)
207 return true;
208 return false;
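
/* Editor's note -- a minimal illustration of the predicate above (not part
   of the original source):

     is_builtin_name ("__builtin_memcpy")        => true
     is_builtin_name ("__sync_fetch_and_add_4")  => true
     is_builtin_name ("__atomic_load_n")         => true
     is_builtin_name ("memcpy")                  => false  */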
211 /* Return true if NODE should be considered for inline expansion regardless
  212 of the optimization level.  This is the case whenever a function is invoked
  213 with its "internal" name, which normally contains the prefix "__builtin". */
215 bool
216 called_as_built_in (tree node)
218 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
219 we want the name used to call the function, not the name it
220 will have. */
221 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
222 return is_builtin_name (name);
  225 /* Compute values M and N such that M divides (address of EXP - N) and such
  226 that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
  227 *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
  228 *ALIGNP and any bit-offset to *BITPOSP.
230 Note that the address (and thus the alignment) computed here is based
231 on the address to which a symbol resolves, whereas DECL_ALIGN is based
232 on the address at which an object is actually located. These two
233 addresses are not always the same. For example, on ARM targets,
234 the address &foo of a Thumb function foo() has the lowest bit set,
235 whereas foo() itself starts on an even address.
237 If ADDR_P is true we are taking the address of the memory reference EXP
238 and thus cannot rely on the access taking place. */
240 static bool
241 get_object_alignment_2 (tree exp, unsigned int *alignp,
242 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
244 poly_int64 bitsize, bitpos;
245 tree offset;
246 machine_mode mode;
247 int unsignedp, reversep, volatilep;
248 unsigned int align = BITS_PER_UNIT;
249 bool known_alignment = false;
251 /* Get the innermost object and the constant (bitpos) and possibly
252 variable (offset) offset of the access. */
253 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
254 &unsignedp, &reversep, &volatilep);
256 /* Extract alignment information from the innermost object and
257 possibly adjust bitpos and offset. */
258 if (TREE_CODE (exp) == FUNCTION_DECL)
260 /* Function addresses can encode extra information besides their
261 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
262 allows the low bit to be used as a virtual bit, we know
263 that the address itself must be at least 2-byte aligned. */
264 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
265 align = 2 * BITS_PER_UNIT;
267 else if (TREE_CODE (exp) == LABEL_DECL)
269 else if (TREE_CODE (exp) == CONST_DECL)
271 /* The alignment of a CONST_DECL is determined by its initializer. */
272 exp = DECL_INITIAL (exp);
273 align = TYPE_ALIGN (TREE_TYPE (exp));
274 if (CONSTANT_CLASS_P (exp))
275 align = targetm.constant_alignment (exp, align);
277 known_alignment = true;
279 else if (DECL_P (exp))
281 align = DECL_ALIGN (exp);
282 known_alignment = true;
284 else if (TREE_CODE (exp) == INDIRECT_REF
285 || TREE_CODE (exp) == MEM_REF
286 || TREE_CODE (exp) == TARGET_MEM_REF)
288 tree addr = TREE_OPERAND (exp, 0);
289 unsigned ptr_align;
290 unsigned HOST_WIDE_INT ptr_bitpos;
291 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
  293 /* If the address is explicitly aligned, handle that. */
294 if (TREE_CODE (addr) == BIT_AND_EXPR
295 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
297 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
298 ptr_bitmask *= BITS_PER_UNIT;
299 align = least_bit_hwi (ptr_bitmask);
300 addr = TREE_OPERAND (addr, 0);
303 known_alignment
304 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
305 align = MAX (ptr_align, align);
307 /* Re-apply explicit alignment to the bitpos. */
308 ptr_bitpos &= ptr_bitmask;
310 /* The alignment of the pointer operand in a TARGET_MEM_REF
311 has to take the variable offset parts into account. */
312 if (TREE_CODE (exp) == TARGET_MEM_REF)
314 if (TMR_INDEX (exp))
316 unsigned HOST_WIDE_INT step = 1;
317 if (TMR_STEP (exp))
318 step = TREE_INT_CST_LOW (TMR_STEP (exp));
319 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
321 if (TMR_INDEX2 (exp))
322 align = BITS_PER_UNIT;
323 known_alignment = false;
326 /* When EXP is an actual memory reference then we can use
327 TYPE_ALIGN of a pointer indirection to derive alignment.
328 Do so only if get_pointer_alignment_1 did not reveal absolute
329 alignment knowledge and if using that alignment would
330 improve the situation. */
331 unsigned int talign;
332 if (!addr_p && !known_alignment
333 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
334 && talign > align)
335 align = talign;
336 else
338 /* Else adjust bitpos accordingly. */
339 bitpos += ptr_bitpos;
340 if (TREE_CODE (exp) == MEM_REF
341 || TREE_CODE (exp) == TARGET_MEM_REF)
342 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
345 else if (TREE_CODE (exp) == STRING_CST)
  347 /* STRING_CSTs are the only constant objects we allow not to be
  348 wrapped inside a CONST_DECL. */
349 align = TYPE_ALIGN (TREE_TYPE (exp));
350 if (CONSTANT_CLASS_P (exp))
351 align = targetm.constant_alignment (exp, align);
353 known_alignment = true;
356 /* If there is a non-constant offset part extract the maximum
357 alignment that can prevail. */
358 if (offset)
360 unsigned int trailing_zeros = tree_ctz (offset);
361 if (trailing_zeros < HOST_BITS_PER_INT)
363 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
364 if (inner)
365 align = MIN (align, inner);
369 /* Account for the alignment of runtime coefficients, so that the constant
370 bitpos is guaranteed to be accurate. */
371 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
372 if (alt_align != 0 && alt_align < align)
374 align = alt_align;
375 known_alignment = false;
378 *alignp = align;
379 *bitposp = bitpos.coeffs[0] & (align - 1);
380 return known_alignment;
383 /* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
385 store M in alignp and N in *BITPOSP and return true. Otherwise return false
386 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
388 bool
389 get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 /* Return the alignment in bits of EXP, an object. */
397 unsigned int
398 get_object_alignment (tree exp)
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
403 get_object_alignment_1 (exp, &align, &bitpos);
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
408 if (bitpos != 0)
409 align = least_bit_hwi (bitpos);
410 return align;
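
/* Editor's sketch of the invariant used above (not part of the original
   source): get_object_alignment_1 yields ALIGN and BITPOS such that
   ptr % ALIGN == BITPOS, so a nonzero BITPOS caps the usable alignment
   at its lowest set bit, e.g.

     align = 16, bitpos = 4   =>  get_object_alignment returns 4
     align = 16, bitpos = 0   =>  get_object_alignment returns 16  */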
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
415 store M in alignp and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
418 If EXP is not a pointer, false is returned too. */
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
424 STRIP_NOPS (exp);
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 /* We cannot really tell whether this result is an approximation. */
466 return false;
468 else
470 *bitposp = 0;
471 *alignp = BITS_PER_UNIT;
472 return false;
475 else if (TREE_CODE (exp) == INTEGER_CST)
477 *alignp = BIGGEST_ALIGNMENT;
478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
483 *bitposp = 0;
484 *alignp = BITS_PER_UNIT;
485 return false;
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
495 unsigned int
496 get_pointer_alignment (tree exp)
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
501 get_pointer_alignment_1 (exp, &align, &bitpos);
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
506 if (bitpos != 0)
507 align = least_bit_hwi (bitpos);
509 return align;
512 /* Return the number of leading non-zero elements in the sequence
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516 unsigned
517 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521 unsigned n;
523 if (eltsize == 1)
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
533 else
535 for (n = 0; n < maxelts; n++)
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
542 return n;
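
/* Editor's illustration (not part of the original source).  For plain char
   data the scan stops at the first NUL or after MAXELTS elements:

     string_length ("ab\0cd", 1, 5) == 2
     string_length ("abcd",   1, 4) == 4   (no NUL within MAXELTS)

   With ELTSIZE 2 or 4 each element is compared against an all-zero element
   via memcmp, as in the loop above.  */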
  545 /* For a call at LOC to a function FN that expects a string in the argument
  546 ARG, issue a diagnostic because it is being called with an argument
  547 declared at DECL that is a character array with no terminating NUL. */
549 void
550 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
552 if (TREE_NO_WARNING (arg))
553 return;
555 loc = expansion_point_location_if_in_system_header (loc);
557 if (warning_at (loc, OPT_Wstringop_overflow_,
558 "%qs argument missing terminating nul", fn))
560 inform (DECL_SOURCE_LOCATION (decl),
561 "referenced argument declared here");
562 TREE_NO_WARNING (arg) = 1;
566 /* If EXP refers to an unterminated constant character array return
567 the declaration of the object of which the array is a member or
568 element and if SIZE is not null, set *SIZE to the size of
569 the unterminated array and set *EXACT if the size is exact or
570 clear it otherwise. Otherwise return null. */
572 tree
573 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
575 /* C_STRLEN will return NULL and set DECL in the info
  576 structure if EXP references an unterminated array. */
577 c_strlen_data lendata = { };
578 tree len = c_strlen (exp, 1, &lendata);
579 if (len == NULL_TREE && lendata.minlen && lendata.decl)
581 if (size)
583 len = lendata.minlen;
584 if (lendata.off)
586 /* Constant offsets are already accounted for in LENDATA.MINLEN,
  587 but not in an SSA_NAME + CST expression. */
588 if (TREE_CODE (lendata.off) == INTEGER_CST)
589 *exact = true;
590 else if (TREE_CODE (lendata.off) == PLUS_EXPR
591 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
593 /* Subtract the offset from the size of the array. */
594 *exact = false;
595 tree temp = TREE_OPERAND (lendata.off, 1);
596 temp = fold_convert (ssizetype, temp);
597 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
599 else
600 *exact = false;
602 else
603 *exact = true;
605 *size = len;
607 return lendata.decl;
610 return NULL_TREE;
613 /* Compute the length of a null-terminated character string or wide
614 character string handling character sizes of 1, 2, and 4 bytes.
615 TREE_STRING_LENGTH is not the right way because it evaluates to
616 the size of the character array in bytes (as opposed to characters)
617 and because it can contain a zero byte in the middle.
619 ONLY_VALUE should be nonzero if the result is not going to be emitted
620 into the instruction stream and zero if it is going to be expanded.
621 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
622 is returned, otherwise NULL, since
623 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
624 evaluate the side-effects.
626 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
627 accesses. Note that this implies the result is not going to be emitted
628 into the instruction stream.
630 Additional information about the string accessed may be recorded
631 in DATA. For example, if SRC references an unterminated string,
632 then the declaration will be stored in the DECL field. If the
633 length of the unterminated string can be determined, it'll be
634 stored in the LEN field. Note this length could well be different
635 than what a C strlen call would return.
637 ELTSIZE is 1 for normal single byte character strings, and 2 or
  638 4 for wide character strings.  ELTSIZE is by default 1.
640 The value returned is of type `ssizetype'. */
642 tree
643 c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
645 /* If we were not passed a DATA pointer, then get one to a local
646 structure. That avoids having to check DATA for NULL before
647 each time we want to use it. */
648 c_strlen_data local_strlen_data = { };
649 if (!data)
650 data = &local_strlen_data;
652 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
653 STRIP_NOPS (src);
654 if (TREE_CODE (src) == COND_EXPR
655 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
657 tree len1, len2;
659 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
660 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
661 if (tree_int_cst_equal (len1, len2))
662 return len1;
665 if (TREE_CODE (src) == COMPOUND_EXPR
666 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
667 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
669 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
671 /* Offset from the beginning of the string in bytes. */
672 tree byteoff;
673 tree memsize;
674 tree decl;
675 src = string_constant (src, &byteoff, &memsize, &decl);
676 if (src == 0)
677 return NULL_TREE;
679 /* Determine the size of the string element. */
680 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
681 return NULL_TREE;
683 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
684 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
685 in case the latter is less than the size of the array, such as when
686 SRC refers to a short string literal used to initialize a large array.
687 In that case, the elements of the array after the terminating NUL are
688 all NUL. */
689 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
690 strelts = strelts / eltsize;
692 if (!tree_fits_uhwi_p (memsize))
693 return NULL_TREE;
695 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
697 /* PTR can point to the byte representation of any string type, including
698 char* and wchar_t*. */
699 const char *ptr = TREE_STRING_POINTER (src);
701 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
703 /* The code below works only for single byte character types. */
704 if (eltsize != 1)
705 return NULL_TREE;
707 /* If the string has an internal NUL character followed by any
708 non-NUL characters (e.g., "foo\0bar"), we can't compute
709 the offset to the following NUL if we don't know where to
710 start searching for it. */
711 unsigned len = string_length (ptr, eltsize, strelts);
713 /* Return when an embedded null character is found or none at all.
714 In the latter case, set the DECL/LEN field in the DATA structure
715 so that callers may examine them. */
716 if (len + 1 < strelts)
717 return NULL_TREE;
718 else if (len >= maxelts)
720 data->decl = decl;
721 data->off = byteoff;
722 data->minlen = ssize_int (len);
723 return NULL_TREE;
726 /* For empty strings the result should be zero. */
727 if (len == 0)
728 return ssize_int (0);
730 /* We don't know the starting offset, but we do know that the string
731 has no internal zero bytes. If the offset falls within the bounds
732 of the string subtract the offset from the length of the string,
733 and return that. Otherwise the length is zero. Take care to
734 use SAVE_EXPR in case the OFFSET has side-effects. */
735 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
736 : byteoff;
737 offsave = fold_convert_loc (loc, sizetype, offsave);
738 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
739 size_int (len));
740 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
741 offsave);
742 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
743 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
744 build_zero_cst (ssizetype));
747 /* Offset from the beginning of the string in elements. */
748 HOST_WIDE_INT eltoff;
750 /* We have a known offset into the string. Start searching there for
751 a null character if we can represent it as a single HOST_WIDE_INT. */
752 if (byteoff == 0)
753 eltoff = 0;
754 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
755 eltoff = -1;
756 else
757 eltoff = tree_to_uhwi (byteoff) / eltsize;
759 /* If the offset is known to be out of bounds, warn, and call strlen at
760 runtime. */
761 if (eltoff < 0 || eltoff >= maxelts)
763 /* Suppress multiple warnings for propagated constant strings. */
764 if (only_value != 2
765 && !TREE_NO_WARNING (src)
766 && warning_at (loc, OPT_Warray_bounds,
767 "offset %qwi outside bounds of constant string",
768 eltoff))
769 TREE_NO_WARNING (src) = 1;
770 return NULL_TREE;
773 /* If eltoff is larger than strelts but less than maxelts the
774 string length is zero, since the excess memory will be zero. */
775 if (eltoff > strelts)
776 return ssize_int (0);
778 /* Use strlen to search for the first zero byte. Since any strings
779 constructed with build_string will have nulls appended, we win even
780 if we get handed something like (char[4])"abcd".
782 Since ELTOFF is our starting index into the string, no further
783 calculation is needed. */
784 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
785 strelts - eltoff);
787 /* Don't know what to return if there was no zero termination.
788 Ideally this would turn into a gcc_checking_assert over time.
789 Set DECL/LEN so callers can examine them. */
790 if (len >= maxelts - eltoff)
792 data->decl = decl;
793 data->off = byteoff;
794 data->minlen = ssize_int (len);
795 return NULL_TREE;
798 return ssize_int (len);
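
/* Editor's examples for c_strlen (not part of the original source), written
   loosely with string literals standing for the tree operands, and assuming
   single-byte elements:

     c_strlen ("hello", 1)                       => ssize_int (5)
     c_strlen (&"foo\0bar"[4], 1)                => ssize_int (3)
     c_strlen (&"foo\0bar"[i], 1), I unknown     => NULL_TREE, because the
       embedded NUL makes the result depend on where the scan starts.  */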
801 /* Return a constant integer corresponding to target reading
802 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
803 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
804 are assumed to be zero, otherwise it reads as many characters
805 as needed. */
  807 rtx
  808 c_readstr (const char *str, scalar_int_mode mode,
809 bool null_terminated_p/*=true*/)
811 HOST_WIDE_INT ch;
812 unsigned int i, j;
813 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
815 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
816 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
817 / HOST_BITS_PER_WIDE_INT;
819 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
820 for (i = 0; i < len; i++)
821 tmp[i] = 0;
823 ch = 1;
824 for (i = 0; i < GET_MODE_SIZE (mode); i++)
826 j = i;
827 if (WORDS_BIG_ENDIAN)
828 j = GET_MODE_SIZE (mode) - i - 1;
829 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
830 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
831 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
832 j *= BITS_PER_UNIT;
834 if (ch || !null_terminated_p)
835 ch = (unsigned char) str[i];
836 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
839 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
840 return immed_wide_int_const (c, mode);
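
/* Editor's example (not part of the original source): on a little-endian
   target, c_readstr ("abcd", SImode) produces the constant 0x64636261.
   Once the terminating NUL is seen (and NULL_TERMINATED_P holds), the
   remaining bytes are filled with zeros, so c_readstr ("a", SImode)
   produces 0x00000061.  */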
  843 /* Cast a target constant CST to target CHAR and, if that value fits into
  844 the host char type, return zero and put that value into the variable
  845 pointed to by P. */
847 static int
848 target_char_cast (tree cst, char *p)
850 unsigned HOST_WIDE_INT val, hostval;
852 if (TREE_CODE (cst) != INTEGER_CST
853 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
854 return 1;
856 /* Do not care if it fits or not right here. */
857 val = TREE_INT_CST_LOW (cst);
859 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
860 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
862 hostval = val;
863 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
864 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
866 if (val != hostval)
867 return 1;
869 *p = hostval;
870 return 0;
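
/* Editor's note (not part of the original source): a typical use is in
   expanding memset-like builtins, roughly

     char c;
     if (target_char_cast (val_tree, &c))
       return NULL_RTX;   -- VAL_TREE is not a usable constant char

   where VAL_TREE stands for the builtin's value argument.  */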
873 /* Similar to save_expr, but assumes that arbitrary code is not executed
874 in between the multiple evaluations. In particular, we assume that a
875 non-addressable local variable will not be modified. */
877 static tree
878 builtin_save_expr (tree exp)
880 if (TREE_CODE (exp) == SSA_NAME
881 || (TREE_ADDRESSABLE (exp) == 0
882 && (TREE_CODE (exp) == PARM_DECL
883 || (VAR_P (exp) && !TREE_STATIC (exp)))))
884 return exp;
886 return save_expr (exp);
889 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
890 times to get the address of either a higher stack frame, or a return
891 address located within it (depending on FNDECL_CODE). */
893 static rtx
894 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
896 int i;
897 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
898 if (tem == NULL_RTX)
900 /* For a zero count with __builtin_return_address, we don't care what
901 frame address we return, because target-specific definitions will
902 override us. Therefore frame pointer elimination is OK, and using
903 the soft frame pointer is OK.
905 For a nonzero count, or a zero count with __builtin_frame_address,
906 we require a stable offset from the current frame pointer to the
907 previous one, so we must use the hard frame pointer, and
908 we must disable frame pointer elimination. */
909 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
910 tem = frame_pointer_rtx;
911 else
913 tem = hard_frame_pointer_rtx;
915 /* Tell reload not to eliminate the frame pointer. */
916 crtl->accesses_prior_frames = 1;
920 if (count > 0)
921 SETUP_FRAME_ADDRESSES ();
923 /* On the SPARC, the return address is not in the frame, it is in a
924 register. There is no way to access it off of the current frame
925 pointer, but it can be accessed off the previous frame pointer by
926 reading the value from the register window save area. */
927 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
928 count--;
930 /* Scan back COUNT frames to the specified frame. */
931 for (i = 0; i < count; i++)
933 /* Assume the dynamic chain pointer is in the word that the
934 frame address points to, unless otherwise specified. */
935 tem = DYNAMIC_CHAIN_ADDRESS (tem);
936 tem = memory_address (Pmode, tem);
937 tem = gen_frame_mem (Pmode, tem);
938 tem = copy_to_reg (tem);
941 /* For __builtin_frame_address, return what we've got. But, on
942 the SPARC for example, we may have to add a bias. */
943 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
944 return FRAME_ADDR_RTX (tem);
946 /* For __builtin_return_address, get the return address from that frame. */
947 #ifdef RETURN_ADDR_RTX
948 tem = RETURN_ADDR_RTX (count, tem);
949 #else
950 tem = memory_address (Pmode,
951 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
952 tem = gen_frame_mem (Pmode, tem);
953 #endif
954 return tem;
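
/* Editor's note (not part of the original source): at the source level the
   builtins expanded above look like

     void *ra = __builtin_return_address (0);  -- current frame's return addr
     void *fp = __builtin_frame_address (1);   -- caller's frame; per the
                                                  comments above this forces
                                                  the hard frame pointer.  */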
957 /* Alias set used for setjmp buffer. */
958 static alias_set_type setjmp_alias_set = -1;
960 /* Construct the leading half of a __builtin_setjmp call. Control will
961 return to RECEIVER_LABEL. This is also called directly by the SJLJ
962 exception handling code. */
964 void
965 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
967 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
968 rtx stack_save;
969 rtx mem;
971 if (setjmp_alias_set == -1)
972 setjmp_alias_set = new_alias_set ();
974 buf_addr = convert_memory_address (Pmode, buf_addr);
976 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
978 /* We store the frame pointer and the address of receiver_label in
979 the buffer and use the rest of it for the stack save area, which
980 is machine-dependent. */
982 mem = gen_rtx_MEM (Pmode, buf_addr);
983 set_mem_alias_set (mem, setjmp_alias_set);
984 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
986 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
987 GET_MODE_SIZE (Pmode))),
988 set_mem_alias_set (mem, setjmp_alias_set);
990 emit_move_insn (validize_mem (mem),
991 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
993 stack_save = gen_rtx_MEM (sa_mode,
994 plus_constant (Pmode, buf_addr,
995 2 * GET_MODE_SIZE (Pmode)));
996 set_mem_alias_set (stack_save, setjmp_alias_set);
997 emit_stack_save (SAVE_NONLOCAL, &stack_save);
999 /* If there is further processing to do, do it. */
1000 if (targetm.have_builtin_setjmp_setup ())
1001 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1003 /* We have a nonlocal label. */
1004 cfun->has_nonlocal_label = 1;
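
/* Editor's sketch of the jmp_buf layout produced above (not part of the
   original source):

     buf[0]   frame value (targetm.builtin_setjmp_frame_value ())
     buf[1]   address of RECEIVER_LABEL
     buf[2]+  stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)  */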
1007 /* Construct the trailing part of a __builtin_setjmp call. This is
1008 also called directly by the SJLJ exception handling code.
 1009 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1011 void
1012 expand_builtin_setjmp_receiver (rtx receiver_label)
1014 rtx chain;
1016 /* Mark the FP as used when we get here, so we have to make sure it's
1017 marked as used by this function. */
1018 emit_use (hard_frame_pointer_rtx);
1020 /* Mark the static chain as clobbered here so life information
1021 doesn't get messed up for it. */
1022 chain = rtx_for_static_chain (current_function_decl, true);
1023 if (chain && REG_P (chain))
1024 emit_clobber (chain);
1026 /* Now put in the code to restore the frame pointer, and argument
1027 pointer, if needed. */
1028 if (! targetm.have_nonlocal_goto ())
1030 /* First adjust our frame pointer to its actual value. It was
1031 previously set to the start of the virtual area corresponding to
1032 the stacked variables when we branched here and now needs to be
1033 adjusted to the actual hardware fp value.
1035 Assignments to virtual registers are converted by
1036 instantiate_virtual_regs into the corresponding assignment
1037 to the underlying register (fp in this case) that makes
1038 the original assignment true.
1039 So the following insn will actually be decrementing fp by
1040 TARGET_STARTING_FRAME_OFFSET. */
1041 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1043 /* Restoring the frame pointer also modifies the hard frame pointer.
1044 Mark it used (so that the previous assignment remains live once
1045 the frame pointer is eliminated) and clobbered (to represent the
1046 implicit update from the assignment). */
1047 emit_use (hard_frame_pointer_rtx);
1048 emit_clobber (hard_frame_pointer_rtx);
1051 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1053 /* If the argument pointer can be eliminated in favor of the
1054 frame pointer, we don't need to restore it. We assume here
1055 that if such an elimination is present, it can always be used.
1056 This is the case on all known machines; if we don't make this
1057 assumption, we do unnecessary saving on many machines. */
1058 size_t i;
1059 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1061 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1062 if (elim_regs[i].from == ARG_POINTER_REGNUM
1063 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1064 break;
1066 if (i == ARRAY_SIZE (elim_regs))
1068 /* Now restore our arg pointer from the address at which it
1069 was saved in our stack frame. */
1070 emit_move_insn (crtl->args.internal_arg_pointer,
1071 copy_to_reg (get_arg_pointer_save_area ()));
1075 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1076 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1077 else if (targetm.have_nonlocal_goto_receiver ())
1078 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1079 else
1080 { /* Nothing */ }
1082 /* We must not allow the code we just generated to be reordered by
1083 scheduling. Specifically, the update of the frame pointer must
1084 happen immediately, not later. */
1085 emit_insn (gen_blockage ());
1088 /* __builtin_longjmp is passed a pointer to an array of five words (not
1089 all will be used on all machines). It operates similarly to the C
1090 library function of the same name, but is more efficient. Much of
1091 the code below is copied from the handling of non-local gotos. */
1093 static void
1094 expand_builtin_longjmp (rtx buf_addr, rtx value)
1096 rtx fp, lab, stack;
1097 rtx_insn *insn, *last;
1098 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
 1100 /* DRAP is needed for stack realignment if longjmp is expanded in the
 1101 current function. */
1102 if (SUPPORTS_STACK_ALIGNMENT)
1103 crtl->need_drap = true;
1105 if (setjmp_alias_set == -1)
1106 setjmp_alias_set = new_alias_set ();
1108 buf_addr = convert_memory_address (Pmode, buf_addr);
1110 buf_addr = force_reg (Pmode, buf_addr);
1112 /* We require that the user must pass a second argument of 1, because
1113 that is what builtin_setjmp will return. */
1114 gcc_assert (value == const1_rtx);
1116 last = get_last_insn ();
1117 if (targetm.have_builtin_longjmp ())
1118 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1119 else
1121 fp = gen_rtx_MEM (Pmode, buf_addr);
1122 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1123 GET_MODE_SIZE (Pmode)));
1125 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1126 2 * GET_MODE_SIZE (Pmode)));
1127 set_mem_alias_set (fp, setjmp_alias_set);
1128 set_mem_alias_set (lab, setjmp_alias_set);
1129 set_mem_alias_set (stack, setjmp_alias_set);
1131 /* Pick up FP, label, and SP from the block and jump. This code is
1132 from expand_goto in stmt.c; see there for detailed comments. */
1133 if (targetm.have_nonlocal_goto ())
1134 /* We have to pass a value to the nonlocal_goto pattern that will
1135 get copied into the static_chain pointer, but it does not matter
1136 what that value is, because builtin_setjmp does not use it. */
1137 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1138 else
1140 lab = copy_to_reg (lab);
1142 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1143 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1145 /* Restore the frame pointer and stack pointer. We must use a
1146 temporary since the setjmp buffer may be a local. */
1147 fp = copy_to_reg (fp);
1148 emit_stack_restore (SAVE_NONLOCAL, stack);
1149 emit_move_insn (hard_frame_pointer_rtx, fp);
1151 emit_use (hard_frame_pointer_rtx);
1152 emit_use (stack_pointer_rtx);
1153 emit_indirect_jump (lab);
1157 /* Search backwards and mark the jump insn as a non-local goto.
1158 Note that this precludes the use of __builtin_longjmp to a
1159 __builtin_setjmp target in the same function. However, we've
1160 already cautioned the user that these functions are for
1161 internal exception handling use only. */
1162 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1164 gcc_assert (insn != last);
1166 if (JUMP_P (insn))
1168 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1169 break;
1171 else if (CALL_P (insn))
1172 break;
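
/* Editor's usage sketch (not part of the original source).  The builtin
   pair is intended for internal exception-handling use only; the second
   longjmp argument must be the constant 1, matching the gcc_assert above:

     static intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);  */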
1176 static inline bool
1177 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1179 return (iter->i < iter->n);
1182 /* This function validates the types of a function call argument list
1183 against a specified list of tree_codes. If the last specifier is a 0,
 1184 that represents an ellipsis; otherwise the last specifier must be a
1185 VOID_TYPE. */
1187 static bool
1188 validate_arglist (const_tree callexpr, ...)
1190 enum tree_code code;
 1191 bool res = false;
1192 va_list ap;
1193 const_call_expr_arg_iterator iter;
1194 const_tree arg;
1196 va_start (ap, callexpr);
1197 init_const_call_expr_arg_iterator (callexpr, &iter);
1199 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1200 tree fn = CALL_EXPR_FN (callexpr);
1201 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1203 for (unsigned argno = 1; ; ++argno)
1205 code = (enum tree_code) va_arg (ap, int);
1207 switch (code)
1209 case 0:
 1210 /* This signifies an ellipsis; any further arguments are all ok. */
1211 res = true;
1212 goto end;
1213 case VOID_TYPE:
1214 /* This signifies an endlink, if no arguments remain, return
1215 true, otherwise return false. */
1216 res = !more_const_call_expr_args_p (&iter);
1217 goto end;
1218 case POINTER_TYPE:
1219 /* The actual argument must be nonnull when either the whole
1220 called function has been declared nonnull, or when the formal
1221 argument corresponding to the actual argument has been. */
1222 if (argmap
1223 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1225 arg = next_const_call_expr_arg (&iter);
1226 if (!validate_arg (arg, code) || integer_zerop (arg))
1227 goto end;
1228 break;
1230 /* FALLTHRU */
1231 default:
1232 /* If no parameters remain or the parameter's code does not
1233 match the specified code, return false. Otherwise continue
1234 checking any remaining arguments. */
1235 arg = next_const_call_expr_arg (&iter);
1236 if (!validate_arg (arg, code))
1237 goto end;
1238 break;
1242 /* We need gotos here since we can only have one VA_CLOSE in a
1243 function. */
1244 end: ;
1245 va_end (ap);
1247 BITMAP_FREE (argmap);
1249 return res;
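
/* Editor's examples (not part of the original source), mirroring calls
   found elsewhere in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       -- exactly two pointer arguments (e.g. __builtin_nonlocal_goto);
     validate_arglist (exp, POINTER_TYPE, 0)
       -- a pointer followed by anything (e.g. __builtin_prefetch).  */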
1252 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1253 and the address of the save area. */
1255 static rtx
1256 expand_builtin_nonlocal_goto (tree exp)
1258 tree t_label, t_save_area;
1259 rtx r_label, r_save_area, r_fp, r_sp;
1260 rtx_insn *insn;
1262 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1263 return NULL_RTX;
1265 t_label = CALL_EXPR_ARG (exp, 0);
1266 t_save_area = CALL_EXPR_ARG (exp, 1);
1268 r_label = expand_normal (t_label);
1269 r_label = convert_memory_address (Pmode, r_label);
1270 r_save_area = expand_normal (t_save_area);
1271 r_save_area = convert_memory_address (Pmode, r_save_area);
1272 /* Copy the address of the save location to a register just in case it was
1273 based on the frame pointer. */
1274 r_save_area = copy_to_reg (r_save_area);
1275 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1276 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1277 plus_constant (Pmode, r_save_area,
1278 GET_MODE_SIZE (Pmode)));
1280 crtl->has_nonlocal_goto = 1;
1282 /* ??? We no longer need to pass the static chain value, afaik. */
1283 if (targetm.have_nonlocal_goto ())
1284 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1285 else
1287 r_label = copy_to_reg (r_label);
1289 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1290 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1292 /* Restore the frame pointer and stack pointer. We must use a
1293 temporary since the setjmp buffer may be a local. */
1294 r_fp = copy_to_reg (r_fp);
1295 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1296 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1298 /* USE of hard_frame_pointer_rtx added for consistency;
1299 not clear if really needed. */
1300 emit_use (hard_frame_pointer_rtx);
1301 emit_use (stack_pointer_rtx);
1303 /* If the architecture is using a GP register, we must
1304 conservatively assume that the target function makes use of it.
1305 The prologue of functions with nonlocal gotos must therefore
1306 initialize the GP register to the appropriate value, and we
1307 must then make sure that this value is live at the point
1308 of the jump. (Note that this doesn't necessarily apply
1309 to targets with a nonlocal_goto pattern; they are free
1310 to implement it in their own way. Note also that this is
1311 a no-op if the GP register is a global invariant.) */
1312 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1313 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1314 emit_use (pic_offset_table_rtx);
1316 emit_indirect_jump (r_label);
1319 /* Search backwards to the jump insn and mark it as a
1320 non-local goto. */
1321 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1323 if (JUMP_P (insn))
1325 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1326 break;
1328 else if (CALL_P (insn))
1329 break;
1332 return const0_rtx;
1335 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1336 (not all will be used on all machines) that was passed to __builtin_setjmp.
1337 It updates the stack pointer in that block to the current value. This is
1338 also called directly by the SJLJ exception handling code. */
1340 void
1341 expand_builtin_update_setjmp_buf (rtx buf_addr)
1343 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1344 buf_addr = convert_memory_address (Pmode, buf_addr);
1345 rtx stack_save
1346 = gen_rtx_MEM (sa_mode,
1347 memory_address
1348 (sa_mode,
1349 plus_constant (Pmode, buf_addr,
1350 2 * GET_MODE_SIZE (Pmode))));
1352 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1355 /* Expand a call to __builtin_prefetch. For a target that does not support
1356 data prefetch, evaluate the memory address argument in case it has side
1357 effects. */
1359 static void
1360 expand_builtin_prefetch (tree exp)
1362 tree arg0, arg1, arg2;
1363 int nargs;
1364 rtx op0, op1, op2;
1366 if (!validate_arglist (exp, POINTER_TYPE, 0))
1367 return;
1369 arg0 = CALL_EXPR_ARG (exp, 0);
1371 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1372 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1373 locality). */
1374 nargs = call_expr_nargs (exp);
1375 if (nargs > 1)
1376 arg1 = CALL_EXPR_ARG (exp, 1);
1377 else
1378 arg1 = integer_zero_node;
1379 if (nargs > 2)
1380 arg2 = CALL_EXPR_ARG (exp, 2);
1381 else
1382 arg2 = integer_three_node;
1384 /* Argument 0 is an address. */
1385 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1387 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1388 if (TREE_CODE (arg1) != INTEGER_CST)
1390 error ("second argument to %<__builtin_prefetch%> must be a constant");
1391 arg1 = integer_zero_node;
1393 op1 = expand_normal (arg1);
1394 /* Argument 1 must be either zero or one. */
1395 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1397 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1398 " using zero");
1399 op1 = const0_rtx;
1402 /* Argument 2 (locality) must be a compile-time constant int. */
1403 if (TREE_CODE (arg2) != INTEGER_CST)
1405 error ("third argument to %<__builtin_prefetch%> must be a constant");
1406 arg2 = integer_zero_node;
1408 op2 = expand_normal (arg2);
1409 /* Argument 2 must be 0, 1, 2, or 3. */
1410 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1412 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1413 op2 = const0_rtx;
1416 if (targetm.have_prefetch ())
1418 struct expand_operand ops[3];
1420 create_address_operand (&ops[0], op0);
1421 create_integer_operand (&ops[1], INTVAL (op1));
1422 create_integer_operand (&ops[2], INTVAL (op2));
1423 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1424 return;
1427 /* Don't do anything with direct references to volatile memory, but
1428 generate code to handle other side effects. */
1429 if (!MEM_P (op0) && side_effects_p (op0))
1430 emit_insn (op0);
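
/* Editor's usage note (not part of the original source):

     __builtin_prefetch (p);        -- read, locality 3 (the defaults)
     __builtin_prefetch (p, 1, 0);  -- write, no temporal locality

   Arguments 2 and 3 must be compile-time constants, as checked above.  */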
1433 /* Get a MEM rtx for expression EXP which is the address of an operand
 1434 to be used in a string instruction (cmpstrsi, movmemsi, ...).  LEN is
1435 the maximum length of the block of memory that might be accessed or
1436 NULL if unknown. */
1438 static rtx
1439 get_memory_rtx (tree exp, tree len)
1441 tree orig_exp = exp;
1442 rtx addr, mem;
1444 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1445 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1446 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1447 exp = TREE_OPERAND (exp, 0);
1449 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1450 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1452 /* Get an expression we can use to find the attributes to assign to MEM.
1453 First remove any nops. */
1454 while (CONVERT_EXPR_P (exp)
1455 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1456 exp = TREE_OPERAND (exp, 0);
 1458 /* Build a MEM_REF representing the whole accessed area as a byte blob
 1459 (as builtin stringops may alias with anything). */
1460 exp = fold_build2 (MEM_REF,
1461 build_array_type (char_type_node,
1462 build_range_type (sizetype,
1463 size_one_node, len)),
1464 exp, build_int_cst (ptr_type_node, 0));
1466 /* If the MEM_REF has no acceptable address, try to get the base object
1467 from the original address we got, and build an all-aliasing
1468 unknown-sized access to that one. */
1469 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1470 set_mem_attributes (mem, exp, 0);
1471 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1472 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1473 0))))
1475 exp = build_fold_addr_expr (exp);
1476 exp = fold_build2 (MEM_REF,
1477 build_array_type (char_type_node,
1478 build_range_type (sizetype,
1479 size_zero_node,
1480 NULL)),
1481 exp, build_int_cst (ptr_type_node, 0));
1482 set_mem_attributes (mem, exp, 0);
1484 set_mem_alias_set (mem, 0);
1485 return mem;
1488 /* Built-in functions to perform an untyped call and return. */
1490 #define apply_args_mode \
1491 (this_target_builtins->x_apply_args_mode)
1492 #define apply_result_mode \
1493 (this_target_builtins->x_apply_result_mode)
1495 /* Return the size required for the block returned by __builtin_apply_args,
1496 and initialize apply_args_mode. */
1498 static int
1499 apply_args_size (void)
1501 static int size = -1;
1502 int align;
1503 unsigned int regno;
1505 /* The values computed by this function never change. */
1506 if (size < 0)
1508 /* The first value is the incoming arg-pointer. */
1509 size = GET_MODE_SIZE (Pmode);
1511 /* The second value is the structure value address unless this is
1512 passed as an "invisible" first argument. */
1513 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1514 size += GET_MODE_SIZE (Pmode);
1516 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1517 if (FUNCTION_ARG_REGNO_P (regno))
1519 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1521 gcc_assert (mode != VOIDmode);
1523 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1524 if (size % align != 0)
1525 size = CEIL (size, align) * align;
1526 size += GET_MODE_SIZE (mode);
1527 apply_args_mode[regno] = mode;
1529 else
1531 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1534 return size;
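
/* Editor's illustration of the size rounding above (not part of the
   original source): each register slot is aligned before being added,
   so with size == 4 and a mode requiring 8-byte alignment,

     size = CEIL (4, 8) * 8 == 8

   and the slot is placed at offset 8.  */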
1537 /* Return the size required for the block returned by __builtin_apply,
1538 and initialize apply_result_mode. */
1540 static int
1541 apply_result_size (void)
1543 static int size = -1;
1544 int align, regno;
1546 /* The values computed by this function never change. */
1547 if (size < 0)
1549 size = 0;
1551 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1552 if (targetm.calls.function_value_regno_p (regno))
1554 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1556 gcc_assert (mode != VOIDmode);
1558 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1559 if (size % align != 0)
1560 size = CEIL (size, align) * align;
1561 size += GET_MODE_SIZE (mode);
1562 apply_result_mode[regno] = mode;
1564 else
1565 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1567 /* Allow targets that use untyped_call and untyped_return to override
1568 the size so that machine-specific information can be stored here. */
1569 #ifdef APPLY_RESULT_SIZE
1570 size = APPLY_RESULT_SIZE;
1571 #endif
1573 return size;
1576 /* Create a vector describing the result block RESULT. If SAVEP is true,
1577 the result block is used to save the values; otherwise it is used to
1578 restore the values. */
1580 static rtx
1581 result_vector (int savep, rtx result)
1583 int regno, size, align, nelts;
1584 fixed_size_mode mode;
1585 rtx reg, mem;
1586 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1588 size = nelts = 0;
1589 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1590 if ((mode = apply_result_mode[regno]) != VOIDmode)
1592 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1593 if (size % align != 0)
1594 size = CEIL (size, align) * align;
1595 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1596 mem = adjust_address (result, mode, size);
1597 savevec[nelts++] = (savep
1598 ? gen_rtx_SET (mem, reg)
1599 : gen_rtx_SET (reg, mem));
1600 size += GET_MODE_SIZE (mode);
1602 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1605 /* Save the state required to perform an untyped call with the same
1606 arguments as were passed to the current function. */
1608 static rtx
1609 expand_builtin_apply_args_1 (void)
1611 rtx registers, tem;
1612 int size, align, regno;
1613 fixed_size_mode mode;
1614 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1616 /* Create a block where the arg-pointer, structure value address,
1617 and argument registers can be saved. */
1618 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1620 /* Walk past the arg-pointer and structure value address. */
1621 size = GET_MODE_SIZE (Pmode);
1622 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1623 size += GET_MODE_SIZE (Pmode);
1625 /* Save each register used in calling a function to the block. */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_args_mode[regno]) != VOIDmode)
1629 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 if (size % align != 0)
1631 size = CEIL (size, align) * align;
1633 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1635 emit_move_insn (adjust_address (registers, mode, size), tem);
1636 size += GET_MODE_SIZE (mode);
1639 /* Save the arg pointer to the block. */
1640 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1641 /* We need the pointer as the caller actually passed it to us, not
 1642 as we might have pretended it was passed.  Make sure it's a valid
 1643 operand, as emit_move_insn isn't expected to handle a PLUS. */
1644 if (STACK_GROWS_DOWNWARD)
 1645 tem
 1646 = force_operand (plus_constant (Pmode, tem,
1647 crtl->args.pretend_args_size),
1648 NULL_RTX);
1649 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1651 size = GET_MODE_SIZE (Pmode);
1653 /* Save the structure value address unless this is passed as an
1654 "invisible" first argument. */
1655 if (struct_incoming_value)
1657 emit_move_insn (adjust_address (registers, Pmode, size),
1658 copy_to_reg (struct_incoming_value));
1659 size += GET_MODE_SIZE (Pmode);
1662 /* Return the address of the block. */
1663 return copy_addr_to_reg (XEXP (registers, 0));
 1666 /* __builtin_apply_args returns a block of memory allocated on
1667 the stack into which is stored the arg pointer, structure
1668 value address, static chain, and all the registers that might
1669 possibly be used in performing a function call. The code is
1670 moved to the start of the function so the incoming values are
1671 saved. */
1673 static rtx
1674 expand_builtin_apply_args (void)
1676 /* Don't do __builtin_apply_args more than once in a function.
1677 Save the result of the first call and reuse it. */
1678 if (apply_args_value != 0)
1679 return apply_args_value;
1681 /* When this function is called, it means that registers must be
1682 saved on entry to this function. So we migrate the
1683 call to the first insn of this function. */
1684 rtx temp;
1686 start_sequence ();
1687 temp = expand_builtin_apply_args_1 ();
1688 rtx_insn *seq = get_insns ();
1689 end_sequence ();
1691 apply_args_value = temp;
1693 /* Put the insns after the NOTE that starts the function.
1694 If this is inside a start_sequence, make the outer-level insn
1695 chain current, so the code is placed at the start of the
1696 function.  If internal_arg_pointer is a non-virtual pseudo,
1697 it needs to be placed after the insn that initializes
1698 that pseudo.  */
1699 push_topmost_sequence ();
1700 if (REG_P (crtl->args.internal_arg_pointer)
1701 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1702 emit_insn_before (seq, parm_birth_insn);
1703 else
1704 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1705 pop_topmost_sequence ();
1706 return temp;
1710 /* Perform an untyped call and save the state required to perform an
1711 untyped return of whatever value was returned by the given function. */
1713 static rtx
1714 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1716 int size, align, regno;
1717 fixed_size_mode mode;
1718 rtx incoming_args, result, reg, dest, src;
1719 rtx_call_insn *call_insn;
1720 rtx old_stack_level = 0;
1721 rtx call_fusage = 0;
1722 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1724 arguments = convert_memory_address (Pmode, arguments);
1726 /* Create a block where the return registers can be saved. */
1727 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1729 /* Fetch the arg pointer from the ARGUMENTS block. */
1730 incoming_args = gen_reg_rtx (Pmode);
1731 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1732 if (!STACK_GROWS_DOWNWARD)
1733 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1734 incoming_args, 0, OPTAB_LIB_WIDEN);
1736 /* Push a new argument block and copy the arguments. Do not allow
1737 the (potential) memcpy call below to interfere with our stack
1738 manipulations. */
1739 do_pending_stack_adjust ();
1740 NO_DEFER_POP;
1742 /* Save the stack with nonlocal if available. */
1743 if (targetm.have_save_stack_nonlocal ())
1744 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1745 else
1746 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1748 /* Allocate a block of memory onto the stack and copy the memory
1749 arguments to the outgoing arguments address. We can pass TRUE
1750 as the last argument because we just saved the stack pointer
1751 and will restore it right after the call. */
1752 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1754 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1755 may have already set current_function_calls_alloca to true.
1756 current_function_calls_alloca won't be set if argsize is zero,
1757 so we have to guarantee need_drap is true here. */
1758 if (SUPPORTS_STACK_ALIGNMENT)
1759 crtl->need_drap = true;
1761 dest = virtual_outgoing_args_rtx;
1762 if (!STACK_GROWS_DOWNWARD)
1764 if (CONST_INT_P (argsize))
1765 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1766 else
1767 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1769 dest = gen_rtx_MEM (BLKmode, dest);
1770 set_mem_align (dest, PARM_BOUNDARY);
1771 src = gen_rtx_MEM (BLKmode, incoming_args);
1772 set_mem_align (src, PARM_BOUNDARY);
1773 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1775 /* Refer to the argument block. */
1776 apply_args_size ();
1777 arguments = gen_rtx_MEM (BLKmode, arguments);
1778 set_mem_align (arguments, PARM_BOUNDARY);
1780 /* Walk past the arg-pointer and structure value address. */
1781 size = GET_MODE_SIZE (Pmode);
1782 if (struct_value)
1783 size += GET_MODE_SIZE (Pmode);
1785 /* Restore each of the registers previously saved. Make USE insns
1786 for each of these registers for use in making the call. */
1787 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1788 if ((mode = apply_args_mode[regno]) != VOIDmode)
1790 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1791 if (size % align != 0)
1792 size = CEIL (size, align) * align;
1793 reg = gen_rtx_REG (mode, regno);
1794 emit_move_insn (reg, adjust_address (arguments, mode, size));
1795 use_reg (&call_fusage, reg);
1796 size += GET_MODE_SIZE (mode);
1799 /* Restore the structure value address unless this is passed as an
1800 "invisible" first argument. */
1801 size = GET_MODE_SIZE (Pmode);
1802 if (struct_value)
1804 rtx value = gen_reg_rtx (Pmode);
1805 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1806 emit_move_insn (struct_value, value);
1807 if (REG_P (struct_value))
1808 use_reg (&call_fusage, struct_value);
1809 size += GET_MODE_SIZE (Pmode);
1812 /* All arguments and registers used for the call are set up by now! */
1813 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1815 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1816 and we don't want to load it into a register as an optimization,
1817 because prepare_call_address already did it if it should be done. */
1818 if (GET_CODE (function) != SYMBOL_REF)
1819 function = memory_address (FUNCTION_MODE, function);
1821 /* Generate the actual call instruction and save the return value. */
1822 if (targetm.have_untyped_call ())
1824 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1825 emit_call_insn (targetm.gen_untyped_call (mem, result,
1826 result_vector (1, result)));
1828 else if (targetm.have_call_value ())
1830 rtx valreg = 0;
1832 /* Locate the unique return register. It is not possible to
1833 express a call that sets more than one return register using
1834 call_value; use untyped_call for that. In fact, untyped_call
1835 only needs to save the return registers in the given block. */
1836 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 if ((mode = apply_result_mode[regno]) != VOIDmode)
1839 gcc_assert (!valreg); /* have_untyped_call required. */
1841 valreg = gen_rtx_REG (mode, regno);
1844 emit_insn (targetm.gen_call_value (valreg,
1845 gen_rtx_MEM (FUNCTION_MODE, function),
1846 const0_rtx, NULL_RTX, const0_rtx));
1848 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1850 else
1851 gcc_unreachable ();
1853 /* Find the CALL insn we just emitted, and attach the register usage
1854 information. */
1855 call_insn = last_call_insn ();
1856 add_function_usage_to (call_insn, call_fusage);
1858 /* Restore the stack. */
1859 if (targetm.have_save_stack_nonlocal ())
1860 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1861 else
1862 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1863 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1865 OK_DEFER_POP;
1867 /* Return the address of the result block. */
1868 result = copy_addr_to_reg (XEXP (result, 0));
1869 return convert_memory_address (ptr_mode, result);
1872 /* Perform an untyped return. */
1874 static void
1875 expand_builtin_return (rtx result)
1877 int size, align, regno;
1878 fixed_size_mode mode;
1879 rtx reg;
1880 rtx_insn *call_fusage = 0;
1882 result = convert_memory_address (Pmode, result);
1884 apply_result_size ();
1885 result = gen_rtx_MEM (BLKmode, result);
1887 if (targetm.have_untyped_return ())
1889 rtx vector = result_vector (0, result);
1890 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1891 emit_barrier ();
1892 return;
1895 /* Restore the return value and note that each value is used. */
1896 size = 0;
1897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1898 if ((mode = apply_result_mode[regno]) != VOIDmode)
1900 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1901 if (size % align != 0)
1902 size = CEIL (size, align) * align;
1903 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1904 emit_move_insn (reg, adjust_address (result, mode, size));
1906 push_to_sequence (call_fusage);
1907 emit_use (reg);
1908 call_fusage = get_insns ();
1909 end_sequence ();
1910 size += GET_MODE_SIZE (mode);
1913 /* Put the USE insns before the return. */
1914 emit_insn (call_fusage);
1916 /* Return whatever values were restored by jumping directly to the end
1917 of the function. */
1918 expand_naked_return ();
1921 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1923 static enum type_class
1924 type_to_class (tree type)
1926 switch (TREE_CODE (type))
1928 case VOID_TYPE: return void_type_class;
1929 case INTEGER_TYPE: return integer_type_class;
1930 case ENUMERAL_TYPE: return enumeral_type_class;
1931 case BOOLEAN_TYPE: return boolean_type_class;
1932 case POINTER_TYPE: return pointer_type_class;
1933 case REFERENCE_TYPE: return reference_type_class;
1934 case OFFSET_TYPE: return offset_type_class;
1935 case REAL_TYPE: return real_type_class;
1936 case COMPLEX_TYPE: return complex_type_class;
1937 case FUNCTION_TYPE: return function_type_class;
1938 case METHOD_TYPE: return method_type_class;
1939 case RECORD_TYPE: return record_type_class;
1940 case UNION_TYPE:
1941 case QUAL_UNION_TYPE: return union_type_class;
1942 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1943 ? string_type_class : array_type_class);
1944 case LANG_TYPE: return lang_type_class;
1945 default: return no_type_class;
1949 /* Expand a call EXP to __builtin_classify_type. */
1951 static rtx
1952 expand_builtin_classify_type (tree exp)
1954 if (call_expr_nargs (exp))
1955 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1956 return GEN_INT (no_type_class);
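/* Usage sketch (user-level, illustrative):

     int ic = __builtin_classify_type (0);          // integer_type_class
     int pc = __builtin_classify_type ((void *) 0); // pointer_type_class
     int rc = __builtin_classify_type (1.0);        // real_type_class

   Each classification is a compile-time constant from enum type_class.  */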
1959 /* This helper macro, meant to be used in mathfn_built_in below, determines
1960 which among a set of builtin math functions is appropriate for a given type
1961 mode.  The `F' (float) and `L' (long double) variants are automatically generated
1962 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1963 types, there are additional types that are considered with 'F32', 'F64',
1964 'F128', etc. suffixes. */
1965 #define CASE_MATHFN(MATHFN) \
1966 CASE_CFN_##MATHFN: \
1967 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1968 fcodel = BUILT_IN_##MATHFN##L ; break;
1969 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1970 types. */
1971 #define CASE_MATHFN_FLOATN(MATHFN) \
1972 CASE_CFN_##MATHFN: \
1973 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1974 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1975 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1976 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1977 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1978 break;
1979 /* Similar to above, but appends _R after any F/L suffix. */
1980 #define CASE_MATHFN_REENT(MATHFN) \
1981 case CFN_BUILT_IN_##MATHFN##_R: \
1982 case CFN_BUILT_IN_##MATHFN##F_R: \
1983 case CFN_BUILT_IN_##MATHFN##L_R: \
1984 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1985 fcodel = BUILT_IN_##MATHFN##L_R ; break;
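/* For illustration, CASE_MATHFN (ACOS) in the switch below expands to

     CASE_CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   so a single line covers the double/float/long double triple.  */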
1987 /* Return a function equivalent to FN but operating on floating-point
1988 values of type TYPE, or END_BUILTINS if no such function exists.
1989 This is purely an operation on function codes; it does not guarantee
1990 that the target actually has an implementation of the function. */
1992 static built_in_function
1993 mathfn_built_in_2 (tree type, combined_fn fn)
1995 tree mtype;
1996 built_in_function fcode, fcodef, fcodel;
1997 built_in_function fcodef16 = END_BUILTINS;
1998 built_in_function fcodef32 = END_BUILTINS;
1999 built_in_function fcodef64 = END_BUILTINS;
2000 built_in_function fcodef128 = END_BUILTINS;
2001 built_in_function fcodef32x = END_BUILTINS;
2002 built_in_function fcodef64x = END_BUILTINS;
2003 built_in_function fcodef128x = END_BUILTINS;
2005 switch (fn)
2007 CASE_MATHFN (ACOS)
2008 CASE_MATHFN (ACOSH)
2009 CASE_MATHFN (ASIN)
2010 CASE_MATHFN (ASINH)
2011 CASE_MATHFN (ATAN)
2012 CASE_MATHFN (ATAN2)
2013 CASE_MATHFN (ATANH)
2014 CASE_MATHFN (CBRT)
2015 CASE_MATHFN_FLOATN (CEIL)
2016 CASE_MATHFN (CEXPI)
2017 CASE_MATHFN_FLOATN (COPYSIGN)
2018 CASE_MATHFN (COS)
2019 CASE_MATHFN (COSH)
2020 CASE_MATHFN (DREM)
2021 CASE_MATHFN (ERF)
2022 CASE_MATHFN (ERFC)
2023 CASE_MATHFN (EXP)
2024 CASE_MATHFN (EXP10)
2025 CASE_MATHFN (EXP2)
2026 CASE_MATHFN (EXPM1)
2027 CASE_MATHFN (FABS)
2028 CASE_MATHFN (FDIM)
2029 CASE_MATHFN_FLOATN (FLOOR)
2030 CASE_MATHFN_FLOATN (FMA)
2031 CASE_MATHFN_FLOATN (FMAX)
2032 CASE_MATHFN_FLOATN (FMIN)
2033 CASE_MATHFN (FMOD)
2034 CASE_MATHFN (FREXP)
2035 CASE_MATHFN (GAMMA)
2036 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2037 CASE_MATHFN (HUGE_VAL)
2038 CASE_MATHFN (HYPOT)
2039 CASE_MATHFN (ILOGB)
2040 CASE_MATHFN (ICEIL)
2041 CASE_MATHFN (IFLOOR)
2042 CASE_MATHFN (INF)
2043 CASE_MATHFN (IRINT)
2044 CASE_MATHFN (IROUND)
2045 CASE_MATHFN (ISINF)
2046 CASE_MATHFN (J0)
2047 CASE_MATHFN (J1)
2048 CASE_MATHFN (JN)
2049 CASE_MATHFN (LCEIL)
2050 CASE_MATHFN (LDEXP)
2051 CASE_MATHFN (LFLOOR)
2052 CASE_MATHFN (LGAMMA)
2053 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2054 CASE_MATHFN (LLCEIL)
2055 CASE_MATHFN (LLFLOOR)
2056 CASE_MATHFN (LLRINT)
2057 CASE_MATHFN (LLROUND)
2058 CASE_MATHFN (LOG)
2059 CASE_MATHFN (LOG10)
2060 CASE_MATHFN (LOG1P)
2061 CASE_MATHFN (LOG2)
2062 CASE_MATHFN (LOGB)
2063 CASE_MATHFN (LRINT)
2064 CASE_MATHFN (LROUND)
2065 CASE_MATHFN (MODF)
2066 CASE_MATHFN (NAN)
2067 CASE_MATHFN (NANS)
2068 CASE_MATHFN_FLOATN (NEARBYINT)
2069 CASE_MATHFN (NEXTAFTER)
2070 CASE_MATHFN (NEXTTOWARD)
2071 CASE_MATHFN (POW)
2072 CASE_MATHFN (POWI)
2073 CASE_MATHFN (POW10)
2074 CASE_MATHFN (REMAINDER)
2075 CASE_MATHFN (REMQUO)
2076 CASE_MATHFN_FLOATN (RINT)
2077 CASE_MATHFN_FLOATN (ROUND)
2078 CASE_MATHFN (SCALB)
2079 CASE_MATHFN (SCALBLN)
2080 CASE_MATHFN (SCALBN)
2081 CASE_MATHFN (SIGNBIT)
2082 CASE_MATHFN (SIGNIFICAND)
2083 CASE_MATHFN (SIN)
2084 CASE_MATHFN (SINCOS)
2085 CASE_MATHFN (SINH)
2086 CASE_MATHFN_FLOATN (SQRT)
2087 CASE_MATHFN (TAN)
2088 CASE_MATHFN (TANH)
2089 CASE_MATHFN (TGAMMA)
2090 CASE_MATHFN_FLOATN (TRUNC)
2091 CASE_MATHFN (Y0)
2092 CASE_MATHFN (Y1)
2093 CASE_MATHFN (YN)
2095 default:
2096 return END_BUILTINS;
2099 mtype = TYPE_MAIN_VARIANT (type);
2100 if (mtype == double_type_node)
2101 return fcode;
2102 else if (mtype == float_type_node)
2103 return fcodef;
2104 else if (mtype == long_double_type_node)
2105 return fcodel;
2106 else if (mtype == float16_type_node)
2107 return fcodef16;
2108 else if (mtype == float32_type_node)
2109 return fcodef32;
2110 else if (mtype == float64_type_node)
2111 return fcodef64;
2112 else if (mtype == float128_type_node)
2113 return fcodef128;
2114 else if (mtype == float32x_type_node)
2115 return fcodef32x;
2116 else if (mtype == float64x_type_node)
2117 return fcodef64x;
2118 else if (mtype == float128x_type_node)
2119 return fcodef128x;
2120 else
2121 return END_BUILTINS;
2124 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2125 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2126 otherwise use the explicit declaration. If we can't do the conversion,
2127 return null. */
2129 static tree
2130 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2132 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2133 if (fcode2 == END_BUILTINS)
2134 return NULL_TREE;
2136 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2137 return NULL_TREE;
2139 return builtin_decl_explicit (fcode2);
2142 /* Like mathfn_built_in_1, but always use the implicit array. */
2144 tree
2145 mathfn_built_in (tree type, combined_fn fn)
2147 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2150 /* Like mathfn_built_in_1, but take a built_in_function and
2151 always use the implicit array. */
2153 tree
2154 mathfn_built_in (tree type, enum built_in_function fn)
2156 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
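/* Example (a sketch): requesting the float variant of sqrt

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the decl of BUILT_IN_SQRTF, or NULL_TREE if the implicit
   declaration is not available.  */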
2159 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2160 return its code, otherwise return IFN_LAST. Note that this function
2161 only tests whether the function is defined in internals.def, not whether
2162 it is actually available on the target. */
2164 internal_fn
2165 associated_internal_fn (tree fndecl)
2167 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2168 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2169 switch (DECL_FUNCTION_CODE (fndecl))
2171 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2172 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2173 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2174 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2175 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2176 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2177 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2178 #include "internal-fn.def"
2180 CASE_FLT_FN (BUILT_IN_POW10):
2181 return IFN_EXP10;
2183 CASE_FLT_FN (BUILT_IN_DREM):
2184 return IFN_REMAINDER;
2186 CASE_FLT_FN (BUILT_IN_SCALBN):
2187 CASE_FLT_FN (BUILT_IN_SCALBLN):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2189 return IFN_LDEXP;
2190 return IFN_LAST;
2192 default:
2193 return IFN_LAST;
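/* Illustrative check (a sketch, not code from this file):

     internal_fn ifn
       = associated_internal_fn (builtin_decl_explicit (BUILT_IN_SQRTF));
     gcc_checking_assert (ifn == IFN_SQRT);

   holds because internal-fn.def defines SQRT via the DEF_INTERNAL_*
   macros included above.  */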
2197 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2198 on the current target by a call to an internal function, return the
2199 code of that internal function, otherwise return IFN_LAST. The caller
2200 is responsible for ensuring that any side-effects of the built-in
2201 call are dealt with correctly. E.g. if CALL sets errno, the caller
2202 must decide that the errno result isn't needed or make it available
2203 in some other way. */
2205 internal_fn
2206 replacement_internal_fn (gcall *call)
2208 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2210 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2211 if (ifn != IFN_LAST)
2213 tree_pair types = direct_internal_fn_types (ifn, call);
2214 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2215 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2216 return ifn;
2219 return IFN_LAST;
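/* Sketch of a caller (an assumption about usage, not code in this file):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         // Rewrite CALL to IFN here, but only after proving the errno
         // side-effect of the library function is unused.
       }
*/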
2222 /* Expand a call to the builtin trinary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2227 operands. */
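/* User-level view (illustrative): when the target has an fma pattern for
   DFmode,

     double r = __builtin_fma (a, b, c);   // single-rounding multiply-add

   expands through fma_optab instead of a libcall.  */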
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2232 optab builtin_optab;
2233 rtx op0, op1, op2, result;
2234 rtx_insn *insns;
2235 tree fndecl = get_callee_fndecl (exp);
2236 tree arg0, arg1, arg2;
2237 machine_mode mode;
2239 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2240 return NULL_RTX;
2242 arg0 = CALL_EXPR_ARG (exp, 0);
2243 arg1 = CALL_EXPR_ARG (exp, 1);
2244 arg2 = CALL_EXPR_ARG (exp, 2);
2246 switch (DECL_FUNCTION_CODE (fndecl))
2248 CASE_FLT_FN (BUILT_IN_FMA):
2249 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2250 builtin_optab = fma_optab; break;
2251 default:
2252 gcc_unreachable ();
2255 /* Make a suitable register to place result in. */
2256 mode = TYPE_MODE (TREE_TYPE (exp));
2258 /* Before working hard, check whether the instruction is available. */
2259 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260 return NULL_RTX;
2262 result = gen_reg_rtx (mode);
2264 /* Always stabilize the argument list. */
2265 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2269 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270 op1 = expand_normal (arg1);
2271 op2 = expand_normal (arg2);
2273 start_sequence ();
2275 /* Compute into RESULT.
2276 Set RESULT to wherever the result comes back. */
2277 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 result, 0);
2280 /* If we were unable to expand via the builtin, stop the sequence
2281 (without outputting the insns) and call the library function
2282 with the stabilized argument list. */
2283 if (result == 0)
2285 end_sequence ();
2286 return expand_call (exp, target, target == const0_rtx);
2289 /* Output the entire sequence. */
2290 insns = get_insns ();
2291 end_sequence ();
2292 emit_insn (insns);
2294 return result;
2297 /* Expand a call to the builtin sin and cos math functions.
2298 Return NULL_RTX if a normal call should be emitted rather than expanding the
2299 function in-line. EXP is the expression that is a call to the builtin
2300 function; if convenient, the result should be placed in TARGET.
2301 SUBTARGET may be used as the target for computing one of EXP's
2302 operands. */
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2307 optab builtin_optab;
2308 rtx op0;
2309 rtx_insn *insns;
2310 tree fndecl = get_callee_fndecl (exp);
2311 machine_mode mode;
2312 tree arg;
2314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2317 arg = CALL_EXPR_ARG (exp, 0);
2319 switch (DECL_FUNCTION_CODE (fndecl))
2321 CASE_FLT_FN (BUILT_IN_SIN):
2322 CASE_FLT_FN (BUILT_IN_COS):
2323 builtin_optab = sincos_optab; break;
2324 default:
2325 gcc_unreachable ();
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (exp));
2331 /* Check if sincos insn is available, otherwise fallback
2332 to sin or cos insn. */
2333 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334 switch (DECL_FUNCTION_CODE (fndecl))
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 builtin_optab = sin_optab; break;
2338 CASE_FLT_FN (BUILT_IN_COS):
2339 builtin_optab = cos_optab; break;
2340 default:
2341 gcc_unreachable ();
2344 /* Before working hard, check whether the instruction is available. */
2345 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2347 rtx result = gen_reg_rtx (mode);
2349 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 need to expand the argument again. This way, we will not perform
2351 side-effects more than once.  */
2352 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2354 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2356 start_sequence ();
2358 /* Compute into RESULT.
2359 Set RESULT to wherever the result comes back. */
2360 if (builtin_optab == sincos_optab)
2362 int ok;
2364 switch (DECL_FUNCTION_CODE (fndecl))
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 break;
2372 default:
2373 gcc_unreachable ();
2375 gcc_assert (ok);
2377 else
2378 result = expand_unop (mode, builtin_optab, op0, result, 0);
2380 if (result != 0)
2382 /* Output the entire sequence. */
2383 insns = get_insns ();
2384 end_sequence ();
2385 emit_insn (insns);
2386 return result;
2389 /* If we were unable to expand via the builtin, stop the sequence
2390 (without outputting the insns) and call the library function
2391 with the stabilized argument list. */
2392 end_sequence ();
2395 return expand_call (exp, target, target == const0_rtx);
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG
2399 return an RTL instruction code that implements the functionality.
2400 If that isn't possible or available return CODE_FOR_nothing. */
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2405 bool errno_set = false;
2406 optab builtin_optab = unknown_optab;
2407 machine_mode mode;
2409 switch (DECL_FUNCTION_CODE (fndecl))
2411 CASE_FLT_FN (BUILT_IN_ILOGB):
2412 errno_set = true; builtin_optab = ilogb_optab; break;
2413 CASE_FLT_FN (BUILT_IN_ISINF):
2414 builtin_optab = isinf_optab; break;
2415 case BUILT_IN_ISNORMAL:
2416 case BUILT_IN_ISFINITE:
2417 CASE_FLT_FN (BUILT_IN_FINITE):
2418 case BUILT_IN_FINITED32:
2419 case BUILT_IN_FINITED64:
2420 case BUILT_IN_FINITED128:
2421 case BUILT_IN_ISINFD32:
2422 case BUILT_IN_ISINFD64:
2423 case BUILT_IN_ISINFD128:
2424 /* These builtins have no optabs (yet). */
2425 break;
2426 default:
2427 gcc_unreachable ();
2430 /* There's no easy way to detect the case we need to set EDOM. */
2431 if (flag_errno_math && errno_set)
2432 return CODE_FOR_nothing;
2434 /* Optab mode depends on the mode of the input argument. */
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2437 if (builtin_optab)
2438 return optab_handler (builtin_optab, mode);
2439 return CODE_FOR_nothing;
2442 /* Expand a call to one of the builtin math functions that operate on
2443 floating point argument and output an integer result (ilogb, isinf,
2444 isnan, etc).
2445 Return 0 if a normal call should be emitted rather than expanding the
2446 function in-line. EXP is the expression that is a call to the builtin
2447 function; if convenient, the result should be placed in TARGET. */
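/* User-level forms handled here (illustrative):

     int a = __builtin_ilogb (x);   // ilogb_optab, unless errno is live
     int b = __builtin_isinf (x);   // isinf_optab

   Each takes a floating-point argument and produces an int.  */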
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2452 enum insn_code icode = CODE_FOR_nothing;
2453 rtx op0;
2454 tree fndecl = get_callee_fndecl (exp);
2455 machine_mode mode;
2456 tree arg;
2458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459 return NULL_RTX;
2461 arg = CALL_EXPR_ARG (exp, 0);
2462 icode = interclass_mathfn_icode (arg, fndecl);
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2465 if (icode != CODE_FOR_nothing)
2467 struct expand_operand ops[1];
2468 rtx_insn *last = get_last_insn ();
2469 tree orig_arg = arg;
2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side-effects more than once.  */
2474 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2476 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2478 if (mode != GET_MODE (op0))
2479 op0 = convert_to_mode (mode, op0, 0);
2481 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482 if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 return ops[0].value;
2486 delete_insns_since (last);
2487 CALL_EXPR_ARG (exp, 0) = orig_arg;
2490 return NULL_RTX;
2493 /* Expand a call to the builtin sincos math function.
2494 Return NULL_RTX if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function. */
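/* Illustrative source form this handles (assuming a real argument x):

     double s, c;
     sincos (x, &s, &c);

   Both results come back through the pointer arguments, which is why the
   expansion below stores TARGET1/TARGET2 into *sinp and *cosp.  */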
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2501 rtx op0, op1, op2, target1, target2;
2502 machine_mode mode;
2503 tree arg, sinp, cosp;
2504 int result;
2505 location_t loc = EXPR_LOCATION (exp);
2506 tree alias_type, alias_off;
2508 if (!validate_arglist (exp, REAL_TYPE,
2509 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510 return NULL_RTX;
2512 arg = CALL_EXPR_ARG (exp, 0);
2513 sinp = CALL_EXPR_ARG (exp, 1);
2514 cosp = CALL_EXPR_ARG (exp, 2);
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (arg));
2519 /* Check if sincos insn is available, otherwise emit the call. */
2520 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521 return NULL_RTX;
2523 target1 = gen_reg_rtx (mode);
2524 target2 = gen_reg_rtx (mode);
2526 op0 = expand_normal (arg);
2527 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528 alias_off = build_int_cst (alias_type, 0);
2529 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 sinp, alias_off));
2531 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 cosp, alias_off));
2534 /* Compute into target1 and target2.
2535 Set TARGET to wherever the result comes back. */
2536 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537 gcc_assert (result);
2539 /* Move target1 and target2 to the memory locations indicated
2540 by op1 and op2. */
2541 emit_move_insn (op1, target1);
2542 emit_move_insn (op2, target2);
2544 return const0_rtx;
2547 /* Expand a call to the internal cexpi builtin to the sincos math function.
2548 EXP is the expression that is a call to the builtin function; if convenient,
2549 the result should be placed in TARGET. */
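/* A sketch of the last fallback below: for __builtin_cexpif (x) with
   neither a sincosf optab nor a libc sincosf, the code builds the
   equivalent of

     cexpf (0.0f + x * 1.0fi)   // cexp of the pure imaginary x*i

   and then extracts the real and imaginary parts as cos (x) and
   sin (x).  */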
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2554 tree fndecl = get_callee_fndecl (exp);
2555 tree arg, type;
2556 machine_mode mode;
2557 rtx op0, op1, op2;
2558 location_t loc = EXPR_LOCATION (exp);
2560 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561 return NULL_RTX;
2563 arg = CALL_EXPR_ARG (exp, 0);
2564 type = TREE_TYPE (arg);
2565 mode = TYPE_MODE (TREE_TYPE (arg));
2567 /* Try expanding via a sincos optab, fall back to emitting a libcall
2568 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2569 is only generated from sincos or cexp, or when either of them is available.  */
2570 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2572 op1 = gen_reg_rtx (mode);
2573 op2 = gen_reg_rtx (mode);
2575 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2577 /* Compute into op1 and op2. */
2578 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2580 else if (targetm.libc_has_function (function_sincos))
2582 tree call, fn = NULL_TREE;
2583 tree top1, top2;
2584 rtx op1a, op2a;
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592 else
2593 gcc_unreachable ();
2595 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597 op1a = copy_addr_to_reg (XEXP (op1, 0));
2598 op2a = copy_addr_to_reg (XEXP (op2, 0));
2599 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2602 /* Make sure not to fold the sincos call again. */
2603 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 call, 3, arg, top1, top2));
2607 else
2609 tree call, fn = NULL_TREE, narg;
2610 tree ctype = build_complex_type (type);
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618 else
2619 gcc_unreachable ();
2621 /* If we don't have a decl for cexp create one. This is the
2622 friendliest fallback if the user calls __builtin_cexpi
2623 without full C99 function support on the target.  */
2624 if (fn == NULL_TREE)
2626 tree fntype;
2627 const char *name = NULL;
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 name = "cexpf";
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 name = "cexp";
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 name = "cexpl";
2636 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 fn = build_fn_decl (name, fntype);
2640 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 build_real (type, dconst0), arg);
2643 /* Make sure not to fold the cexp call again. */
2644 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645 return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 target, VOIDmode, EXPAND_NORMAL);
2649 /* Now build the proper return type. */
2650 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 make_tree (TREE_TYPE (arg), op2),
2652 make_tree (TREE_TYPE (arg), op1)),
2653 target, VOIDmode, EXPAND_NORMAL);
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions.  Unlike build_call_expr
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2664 va_list ap;
2665 tree fntype = TREE_TYPE (fndecl);
2666 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2668 va_start (ap, n);
2669 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670 va_end (ap);
2671 SET_EXPR_LOCATION (fn, loc);
2672 return fn;
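/* Usage sketch, mirroring the calls later in this file:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 1, arg);

   The result is always a CALL_EXPR, never a folded form.  */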
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676 as an extension (lfloor and lceil). As these are gcc extensions we
2677 do not need to worry about setting errno to EDOM.
2678 If expanding via optab fails, lower expression to (int)(floor(x)).
2679 EXP is the expression that is a call to the builtin function;
2680 if convenient, the result should be placed in TARGET. */
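/* Illustrative lowering: with no lfloor pattern for the mode,

     long l = __builtin_lfloor (x);

   becomes the equivalent of (long) floor (x): a call to the floor
   fallback whose result is narrowed with expand_fix.  */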
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2685 convert_optab builtin_optab;
2686 rtx op0, tmp;
2687 rtx_insn *insns;
2688 tree fndecl = get_callee_fndecl (exp);
2689 enum built_in_function fallback_fn;
2690 tree fallback_fndecl;
2691 machine_mode mode;
2692 tree arg;
2694 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2695 return NULL_RTX;
2697 arg = CALL_EXPR_ARG (exp, 0);
2699 switch (DECL_FUNCTION_CODE (fndecl))
2701 CASE_FLT_FN (BUILT_IN_ICEIL):
2702 CASE_FLT_FN (BUILT_IN_LCEIL):
2703 CASE_FLT_FN (BUILT_IN_LLCEIL):
2704 builtin_optab = lceil_optab;
2705 fallback_fn = BUILT_IN_CEIL;
2706 break;
2708 CASE_FLT_FN (BUILT_IN_IFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LFLOOR):
2710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2711 builtin_optab = lfloor_optab;
2712 fallback_fn = BUILT_IN_FLOOR;
2713 break;
2715 default:
2716 gcc_unreachable ();
2719 /* Make a suitable register to place result in. */
2720 mode = TYPE_MODE (TREE_TYPE (exp));
2722 target = gen_reg_rtx (mode);
2724 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725 need to expand the argument again. This way, we will not perform
2726 side-effects more than once.  */
2727 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2729 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2731 start_sequence ();
2733 /* Compute into TARGET. */
2734 if (expand_sfix_optab (target, op0, builtin_optab))
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2738 end_sequence ();
2739 emit_insn (insns);
2740 return target;
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns). */
2745 end_sequence ();
2747 /* Fall back to floating point rounding optab. */
2748 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2750 /* For non-C99 targets we may end up without a fallback fndecl here
2751 if the user called __builtin_lfloor directly. In this case emit
2752 a call to the floor/ceil variants nevertheless. This should result
2753 in the best user experience on targets without full C99 support.  */
2754 if (fallback_fndecl == NULL_TREE)
2756 tree fntype;
2757 const char *name = NULL;
2759 switch (DECL_FUNCTION_CODE (fndecl))
2761 case BUILT_IN_ICEIL:
2762 case BUILT_IN_LCEIL:
2763 case BUILT_IN_LLCEIL:
2764 name = "ceil";
2765 break;
2766 case BUILT_IN_ICEILF:
2767 case BUILT_IN_LCEILF:
2768 case BUILT_IN_LLCEILF:
2769 name = "ceilf";
2770 break;
2771 case BUILT_IN_ICEILL:
2772 case BUILT_IN_LCEILL:
2773 case BUILT_IN_LLCEILL:
2774 name = "ceill";
2775 break;
2776 case BUILT_IN_IFLOOR:
2777 case BUILT_IN_LFLOOR:
2778 case BUILT_IN_LLFLOOR:
2779 name = "floor";
2780 break;
2781 case BUILT_IN_IFLOORF:
2782 case BUILT_IN_LFLOORF:
2783 case BUILT_IN_LLFLOORF:
2784 name = "floorf";
2785 break;
2786 case BUILT_IN_IFLOORL:
2787 case BUILT_IN_LFLOORL:
2788 case BUILT_IN_LLFLOORL:
2789 name = "floorl";
2790 break;
2791 default:
2792 gcc_unreachable ();
2795 fntype = build_function_type_list (TREE_TYPE (arg),
2796 TREE_TYPE (arg), NULL_TREE);
2797 fallback_fndecl = build_fn_decl (name, fntype);
2800 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2802 tmp = expand_normal (exp);
2803 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2805 /* Truncate the result of floating point optab to integer
2806 via expand_fix (). */
2807 target = gen_reg_rtx (mode);
2808 expand_fix (target, tmp, 0);
2810 return target;
2813 /* Expand a call to one of the builtin math functions doing integer
2814 conversion (lrint).
2815 Return 0 if a normal call should be emitted rather than expanding the
2816 function in-line. EXP is the expression that is a call to the builtin
2817 function; if convenient, the result should be placed in TARGET. */
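/* Illustrative: "l = __builtin_llrint (x)" expands through lrint_optab;
   if that fails for __builtin_irint, the code below falls back to
   calling lrint and converting the result to int.  */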
2819 static rtx
2820 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2822 convert_optab builtin_optab;
2823 rtx op0;
2824 rtx_insn *insns;
2825 tree fndecl = get_callee_fndecl (exp);
2826 tree arg;
2827 machine_mode mode;
2828 enum built_in_function fallback_fn = BUILT_IN_NONE;
2830 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2831 return NULL_RTX;
2833 arg = CALL_EXPR_ARG (exp, 0);
2835 switch (DECL_FUNCTION_CODE (fndecl))
2837 CASE_FLT_FN (BUILT_IN_IRINT):
2838 fallback_fn = BUILT_IN_LRINT;
2839 gcc_fallthrough ();
2840 CASE_FLT_FN (BUILT_IN_LRINT):
2841 CASE_FLT_FN (BUILT_IN_LLRINT):
2842 builtin_optab = lrint_optab;
2843 break;
2845 CASE_FLT_FN (BUILT_IN_IROUND):
2846 fallback_fn = BUILT_IN_LROUND;
2847 gcc_fallthrough ();
2848 CASE_FLT_FN (BUILT_IN_LROUND):
2849 CASE_FLT_FN (BUILT_IN_LLROUND):
2850 builtin_optab = lround_optab;
2851 break;
2853 default:
2854 gcc_unreachable ();
2857 /* There's no easy way to detect the case we need to set EDOM. */
2858 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2859 return NULL_RTX;
2861 /* Make a suitable register to place result in. */
2862 mode = TYPE_MODE (TREE_TYPE (exp));
2864 /* There's no easy way to detect the case we need to set EDOM. */
2865 if (!flag_errno_math)
2867 rtx result = gen_reg_rtx (mode);
2869 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 need to expand the argument again. This way, we will not perform
2871 side-effects more than once.  */
2872 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2874 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2876 start_sequence ();
2878 if (expand_sfix_optab (result, op0, builtin_optab))
2880 /* Output the entire sequence. */
2881 insns = get_insns ();
2882 end_sequence ();
2883 emit_insn (insns);
2884 return result;
2887 /* If we were unable to expand via the builtin, stop the sequence
2888 (without outputting the insns) and call the library function
2889 with the stabilized argument list. */
2890 end_sequence ();
2893 if (fallback_fn != BUILT_IN_NONE)
2895 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2896 targets, (int) round (x) should never be transformed into
2897 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 a call to lround in the hope that the target provides at least some
2899 C99 functions.  This should result in the best user experience on
2900 targets without full C99 support.  */
2901 tree fallback_fndecl = mathfn_built_in_1
2902 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2904 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2905 fallback_fndecl, 1, arg);
2907 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2908 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2909 return convert_to_mode (mode, target, 0);
2912 return expand_call (exp, target, target == const0_rtx);
2915 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2916 a normal call should be emitted rather than expanding the function
2917 in-line. EXP is the expression that is a call to the builtin
2918 function; if convenient, the result should be placed in TARGET. */
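/* Illustrative expansion (the libgcc name is assumed from its usual
   convention):

     double r = __builtin_powi (x, n);

   becomes a libcall such as __powidf2 (x, n), located through
   optab_libfunc (powi_optab, DFmode).  */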
2920 static rtx
2921 expand_builtin_powi (tree exp, rtx target)
2923 tree arg0, arg1;
2924 rtx op0, op1;
2925 machine_mode mode;
2926 machine_mode mode2;
2928 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2929 return NULL_RTX;
2931 arg0 = CALL_EXPR_ARG (exp, 0);
2932 arg1 = CALL_EXPR_ARG (exp, 1);
2933 mode = TYPE_MODE (TREE_TYPE (exp));
2935 /* Emit a libcall to libgcc. */
2937 /* Mode of the 2nd argument must match that of an int. */
2938 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2940 if (target == NULL_RTX)
2941 target = gen_reg_rtx (mode);
2943 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2944 if (GET_MODE (op0) != mode)
2945 op0 = convert_to_mode (mode, op0, 0);
2946 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2947 if (GET_MODE (op1) != mode2)
2948 op1 = convert_to_mode (mode2, op1, 0);
2950 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2951 target, LCT_CONST, mode,
2952 op0, mode, op1, mode2);
2954 return target;
2957 /* Expand expression EXP which is a call to the strlen builtin. Return
2958 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2959 try to get the result in TARGET, if convenient. */
2961 static rtx
2962 expand_builtin_strlen (tree exp, rtx target,
2963 machine_mode target_mode)
2965 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2966 return NULL_RTX;
2968 struct expand_operand ops[4];
2969 rtx pat;
2970 tree len;
2971 tree src = CALL_EXPR_ARG (exp, 0);
2972 rtx src_reg;
2973 rtx_insn *before_strlen;
2974 machine_mode insn_mode;
2975 enum insn_code icode = CODE_FOR_nothing;
2976 unsigned int align;
2978 /* If the length can be computed at compile-time, return it. */
2979 len = c_strlen (src, 0);
2980 if (len)
2981 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 /* If the length can be computed at compile-time and is constant
2984 integer, but there are side-effects in src, evaluate
2985 src for side-effects, then return len.
2986 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987 can be optimized into: i++; x = 3; */
2988 len = c_strlen (src, 1);
2989 if (len && TREE_CODE (len) == INTEGER_CST)
2991 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2995 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2997 /* If SRC is not a pointer type, don't do this operation inline. */
2998 if (align == 0)
2999 return NULL_RTX;
3001 /* Bail out if we can't compute strlen in the right mode. */
3002 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3004 icode = optab_handler (strlen_optab, insn_mode);
3005 if (icode != CODE_FOR_nothing)
3006 break;
3008 if (insn_mode == VOIDmode)
3009 return NULL_RTX;
3011 /* Make a place to hold the source address. We will not expand
3012 the actual source until we are sure that the expansion will
3013 not fail -- there are trees that cannot be expanded twice. */
3014 src_reg = gen_reg_rtx (Pmode);
3016 /* Mark the beginning of the strlen sequence so we can emit the
3017 source operand later. */
3018 before_strlen = get_last_insn ();
3020 create_output_operand (&ops[0], target, insn_mode);
3021 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3022 create_integer_operand (&ops[2], 0);
3023 create_integer_operand (&ops[3], align);
3024 if (!maybe_expand_insn (icode, 4, ops))
3025 return NULL_RTX;
3027 /* Check to see if the argument was declared attribute nonstring
3028 and if so, issue a warning since at this point it's not known
3029 to be nul-terminated. */
3030 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3032 /* Now that we are assured of success, expand the source. */
3033 start_sequence ();
3034 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3035 if (pat != src_reg)
3037 #ifdef POINTERS_EXTEND_UNSIGNED
3038 if (GET_MODE (pat) != Pmode)
3039 pat = convert_to_mode (Pmode, pat,
3040 POINTERS_EXTEND_UNSIGNED);
3041 #endif
3042 emit_move_insn (src_reg, pat);
3044 pat = get_insns ();
3045 end_sequence ();
3047 if (before_strlen)
3048 emit_insn_after (pat, before_strlen);
3049 else
3050 emit_insn_before (pat, get_insns ());
3052 /* Return the value in the proper mode for this function. */
3053 if (GET_MODE (ops[0].value) == target_mode)
3054 target = ops[0].value;
3055 else if (target != 0)
3056 convert_move (target, ops[0].value, 0);
3057 else
3058 target = convert_to_mode (target_mode, ops[0].value, 0);
3060 return target;
3063 /* Expand call EXP to the strnlen built-in, returning the result
3064 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3066 static rtx
3067 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3069 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3070 return NULL_RTX;
3072 tree src = CALL_EXPR_ARG (exp, 0);
3073 tree bound = CALL_EXPR_ARG (exp, 1);
3075 if (!bound)
3076 return NULL_RTX;
3078 location_t loc = UNKNOWN_LOCATION;
3079 if (EXPR_HAS_LOCATION (exp))
3080 loc = EXPR_LOCATION (exp);
3082 tree maxobjsize = max_object_size ();
3083 tree func = get_callee_fndecl (exp);
3085 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3086 so these conversions aren't necessary. */
3087 c_strlen_data lendata = { };
3088 tree len = c_strlen (src, 0, &lendata, 1);
3089 if (len)
3090 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3092 if (TREE_CODE (bound) == INTEGER_CST)
3094 if (!TREE_NO_WARNING (exp)
3095 && tree_int_cst_lt (maxobjsize, bound)
3096 && warning_at (loc, OPT_Wstringop_overflow_,
3097 "%K%qD specified bound %E "
3098 "exceeds maximum object size %E",
3099 exp, func, bound, maxobjsize))
3100 TREE_NO_WARNING (exp) = true;
3102 bool exact = true;
3103 if (!len || TREE_CODE (len) != INTEGER_CST)
3105 /* Clear EXACT if LEN may be less than SRC suggests,
3106 such as in
3107 strnlen (&a[i], sizeof a)
3108 where the value of i is unknown. Unless i's value is
3109 zero, the call is unsafe because the bound is greater. */
3110 lendata.decl = unterminated_array (src, &len, &exact);
3111 if (!lendata.decl)
3112 return NULL_RTX;
3115 if (lendata.decl
3116 && !TREE_NO_WARNING (exp)
3117 && ((tree_int_cst_lt (len, bound))
3118 || !exact))
3120 location_t warnloc
3121 = expansion_point_location_if_in_system_header (loc);
3123 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3124 exact
3125 ? G_("%K%qD specified bound %E exceeds the size %E "
3126 "of unterminated array")
3127 : G_("%K%qD specified bound %E may exceed the size "
3128 "of at most %E of unterminated array"),
3129 exp, func, bound, len))
3131 inform (DECL_SOURCE_LOCATION (lendata.decl),
3132 "referenced argument declared here");
3133 TREE_NO_WARNING (exp) = true;
3134 return NULL_RTX;
3138 if (!len)
3139 return NULL_RTX;
3141 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3142 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3145 if (TREE_CODE (bound) != SSA_NAME)
3146 return NULL_RTX;
3148 wide_int min, max;
3149 enum value_range_kind rng = get_range_info (bound, &min, &max);
3150 if (rng != VR_RANGE)
3151 return NULL_RTX;
3153 if (!TREE_NO_WARNING (exp)
3154 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3155 && warning_at (loc, OPT_Wstringop_overflow_,
3156 "%K%qD specified bound [%wu, %wu] "
3157 "exceeds maximum object size %E",
3158 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3159 TREE_NO_WARNING (exp) = true;
3161 bool exact = true;
3162 if (!len || TREE_CODE (len) != INTEGER_CST)
3164 lendata.decl = unterminated_array (src, &len, &exact);
3165 if (!lendata.decl)
3166 return NULL_RTX;
3169 if (lendata.decl
3170 && !TREE_NO_WARNING (exp)
3171 && (wi::ltu_p (wi::to_wide (len), min)
3172 || !exact))
3174 location_t warnloc
3175 = expansion_point_location_if_in_system_header (loc);
3177 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3178 exact
3179 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3180 "the size %E of unterminated array")
3181 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3182 "the size of at most %E of unterminated array"),
3183 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3185 inform (DECL_SOURCE_LOCATION (lendata.decl),
3186 "referenced argument declared here");
3187 TREE_NO_WARNING (exp) = true;
3191 if (lendata.decl)
3192 return NULL_RTX;
3194 if (wi::gtu_p (min, wi::to_wide (len)))
3195 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3197 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3198 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3201 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3202 bytes from constant string DATA + OFFSET and return it as target
3203 constant. */
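/* For example (illustrative): with DATA = "hello", OFFSET = 1 and a
   4-byte MODE this returns the target constant for the bytes "ello",
   packed by c_readstr in the target's byte order.  */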
3205 static rtx
3206 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3207 scalar_int_mode mode)
3209 const char *str = (const char *) data;
3211 gcc_assert (offset >= 0
3212 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3213 <= strlen (str) + 1));
3215 return c_readstr (str + offset, mode);
3218 /* LEN specifies the length of the block for the memcpy/memset operation.
3219 Figure out its range and store it into MIN_SIZE/MAX_SIZE.
3220 In some cases we can make a very likely guess about the maximum size,
3221 which we then store into PROBABLE_MAX_SIZE.  */
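/* Worked example (illustrative): for

     unsigned n = ...;
     if (n < 100)
       memcpy (a, b, n);

   VRP gives LEN the range [0, 99], so *MIN_SIZE = 0 and
   *MAX_SIZE = *PROBABLE_MAX_SIZE = 99; the signed variant is handled by
   the anti-range case in the body.  */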
3223 static void
3224 determine_block_size (tree len, rtx len_rtx,
3225 unsigned HOST_WIDE_INT *min_size,
3226 unsigned HOST_WIDE_INT *max_size,
3227 unsigned HOST_WIDE_INT *probable_max_size)
3229 if (CONST_INT_P (len_rtx))
3231 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3232 return;
3234 else
3236 wide_int min, max;
3237 enum value_range_kind range_type = VR_UNDEFINED;
3239 /* Determine bounds from the type. */
3240 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3241 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3242 else
3243 *min_size = 0;
3244 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3245 *probable_max_size = *max_size
3246 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3247 else
3248 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3250 if (TREE_CODE (len) == SSA_NAME)
3251 range_type = get_range_info (len, &min, &max);
3252 if (range_type == VR_RANGE)
3254 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3255 *min_size = min.to_uhwi ();
3256 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3257 *probable_max_size = *max_size = max.to_uhwi ();
3259 else if (range_type == VR_ANTI_RANGE)
3261 /* An anti range 0...N lets us determine that the minimal size is N+1.  */
3262 if (min == 0)
3264 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3265 *min_size = max.to_uhwi () + 1;
3267 /* Code like
3269 int n;
3270 if (n < 100)
3271 memcpy (a, b, n)
3273 produces an anti range allowing negative values of N.  We can
3274 still use this information to guess that N is not negative.  */
3276 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3277 *probable_max_size = min.to_uhwi () - 1;
3280 gcc_checking_assert (*max_size <=
3281 (unsigned HOST_WIDE_INT)
3282 GET_MODE_MASK (GET_MODE (len_rtx)));
3285 /* Try to verify that the sizes and lengths of the arguments to a string
3286 manipulation function given by EXP are within valid bounds and that
3287 the operation does not lead to buffer overflow or read past the end.
3288 Arguments other than EXP may be null. When non-null, the arguments
3289 have the following meaning:
3290 DST is the destination of a copy call or NULL otherwise.
3291 SRC is the source of a copy call or NULL otherwise.
3292 DSTWRITE is the number of bytes written into the destination obtained
3293 from the user-supplied size argument to the function (such as in
3294 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3295 MAXREAD is the user-supplied bound on the length of the source sequence
3296 (such as in strncat (d, s, N)).  It specifies the upper limit on the number
3297 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3298 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3299 expression EXP is a string function call (as opposed to a memory call
3300 like memcpy). As an exception, SRCSTR can also be an integer denoting
3301 the precomputed size of the source string or object (for functions like
3302 memcpy).
3303 DSTSIZE is the size of the destination object specified by the last
3304 argument to the _chk builtins, typically resulting from the expansion
3305 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3306 DSTSIZE).
3308 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3309 SIZE_MAX.
3311 If the call is successfully verified as safe return true, otherwise
3312 return false. */
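/* Example of a diagnosed call (illustrative):

     char d[4];
     strcpy (d, "abcde");   // writes 6 bytes into a region of size 4

   which trips the destination-size check below and emits
   -Wstringop-overflow.  */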
3314 static bool
3315 check_access (tree exp, tree, tree, tree dstwrite,
3316 tree maxread, tree srcstr, tree dstsize)
3318 int opt = OPT_Wstringop_overflow_;
3320 /* The size of the largest object is half the address space, or
3321 PTRDIFF_MAX. (This is way too permissive.) */
3322 tree maxobjsize = max_object_size ();
3324 /* Either the length of the source string for string functions or
3325 the size of the source object for raw memory functions. */
3326 tree slen = NULL_TREE;
3328 tree range[2] = { NULL_TREE, NULL_TREE };
3330 /* Set to true when the exact number of bytes written by a string
3331 function like strcpy is not known and the only thing that is
3332 known is that it must be at least one (for the terminating nul). */
3333 bool at_least_one = false;
3334 if (srcstr)
3336 /* SRCSTR is normally a pointer to string but as a special case
3337 it can be an integer denoting the length of a string. */
3338 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3340 /* Try to determine the range of lengths the source string
3341 refers to. If it can be determined and is less than
3342 the upper bound given by MAXREAD add one to it for
3343 the terminating nul. Otherwise, set it to one for
3344 the same reason, or to MAXREAD as appropriate. */
3345 c_strlen_data lendata = { };
3346 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3347 range[0] = lendata.minlen;
3348 range[1] = lendata.maxbound;
3349 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3351 if (maxread && tree_int_cst_le (maxread, range[0]))
3352 range[0] = range[1] = maxread;
3353 else
3354 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3355 range[0], size_one_node);
3357 if (maxread && tree_int_cst_le (maxread, range[1]))
3358 range[1] = maxread;
3359 else if (!integer_all_onesp (range[1]))
3360 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3361 range[1], size_one_node);
3363 slen = range[0];
3365 else
3367 at_least_one = true;
3368 slen = size_one_node;
3371 else
3372 slen = srcstr;
3375 if (!dstwrite && !maxread)
3377 /* When the only available piece of data is the object size
3378 there is nothing to do. */
3379 if (!slen)
3380 return true;
3382 /* Otherwise, when the length of the source sequence is known
3383 (as with strlen), set DSTWRITE to it. */
3384 if (!range[0])
3385 dstwrite = slen;
3388 if (!dstsize)
3389 dstsize = maxobjsize;
3391 if (dstwrite)
3392 get_size_range (dstwrite, range);
3394 tree func = get_callee_fndecl (exp);
3396 /* First check the number of bytes to be written against the maximum
3397 object size. */
3398 if (range[0]
3399 && TREE_CODE (range[0]) == INTEGER_CST
3400 && tree_int_cst_lt (maxobjsize, range[0]))
3402 if (TREE_NO_WARNING (exp))
3403 return false;
3405 location_t loc = tree_nonartificial_location (exp);
3406 loc = expansion_point_location_if_in_system_header (loc);
3408 bool warned;
3409 if (range[0] == range[1])
3410 warned = warning_at (loc, opt,
3411 "%K%qD specified size %E "
3412 "exceeds maximum object size %E",
3413 exp, func, range[0], maxobjsize);
3414 else
3415 warned = warning_at (loc, opt,
3416 "%K%qD specified size between %E and %E "
3417 "exceeds maximum object size %E",
3418 exp, func,
3419 range[0], range[1], maxobjsize);
3420 if (warned)
3421 TREE_NO_WARNING (exp) = true;
3423 return false;
3426 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3427 constant, and in range of unsigned HOST_WIDE_INT. */
3428 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3430 /* Next check the number of bytes to be written against the destination
3431 object size. */
3432 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3434 if (range[0]
3435 && TREE_CODE (range[0]) == INTEGER_CST
3436 && ((tree_fits_uhwi_p (dstsize)
3437 && tree_int_cst_lt (dstsize, range[0]))
3438 || (dstwrite
3439 && tree_fits_uhwi_p (dstwrite)
3440 && tree_int_cst_lt (dstwrite, range[0]))))
3442 if (TREE_NO_WARNING (exp))
3443 return false;
3445 location_t loc = tree_nonartificial_location (exp);
3446 loc = expansion_point_location_if_in_system_header (loc);
3448 if (dstwrite == slen && at_least_one)
3450 /* This is a call to strcpy with a destination of 0 size
3451 and a source of unknown length. The call will write
3452 at least one byte past the end of the destination. */
3453 warning_at (loc, opt,
3454 "%K%qD writing %E or more bytes into a region "
3455 "of size %E overflows the destination",
3456 exp, func, range[0], dstsize);
3458 else if (tree_int_cst_equal (range[0], range[1]))
3459 warning_n (loc, opt, tree_to_uhwi (range[0]),
3460 "%K%qD writing %E byte into a region "
3461 "of size %E overflows the destination",
3462 "%K%qD writing %E bytes into a region "
3463 "of size %E overflows the destination",
3464 exp, func, range[0], dstsize);
3465 else if (tree_int_cst_sign_bit (range[1]))
3467 /* Avoid printing the upper bound if it's invalid. */
3468 warning_at (loc, opt,
3469 "%K%qD writing %E or more bytes into a region "
3470 "of size %E overflows the destination",
3471 exp, func, range[0], dstsize);
3473 else
3474 warning_at (loc, opt,
3475 "%K%qD writing between %E and %E bytes into "
3476 "a region of size %E overflows the destination",
3477 exp, func, range[0], range[1],
3478 dstsize);
3480 /* Return failure when an overflow has been detected. */
3481 return false;
3485 /* Check the maximum length of the source sequence against the size
3486 of the destination object if known, or against the maximum size
3487 of an object. */
3488 if (maxread)
3490 get_size_range (maxread, range);
3492 /* Use the lower end for MAXREAD from now on. */
3493 if (range[0])
3494 maxread = range[0];
3496 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3498 location_t loc = tree_nonartificial_location (exp);
3499 loc = expansion_point_location_if_in_system_header (loc);
3501 if (tree_int_cst_lt (maxobjsize, range[0]))
3503 if (TREE_NO_WARNING (exp))
3504 return false;
3506 /* Warn about crazy big sizes first since that's more
3507 likely to be meaningful than saying that the bound
3508 is greater than the object size if both are big. */
3509 if (range[0] == range[1])
3510 warning_at (loc, opt,
3511 "%K%qD specified bound %E "
3512 "exceeds maximum object size %E",
3513 exp, func,
3514 range[0], maxobjsize);
3515 else
3516 warning_at (loc, opt,
3517 "%K%qD specified bound between %E and %E "
3518 "exceeds maximum object size %E",
3519 exp, func,
3520 range[0], range[1], maxobjsize);
3522 return false;
3525 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3527 if (TREE_NO_WARNING (exp))
3528 return false;
3530 if (tree_int_cst_equal (range[0], range[1]))
3531 warning_at (loc, opt,
3532 "%K%qD specified bound %E "
3533 "exceeds destination size %E",
3534 exp, func,
3535 range[0], dstsize);
3536 else
3537 warning_at (loc, opt,
3538 "%K%qD specified bound between %E and %E "
3539 "exceeds destination size %E",
3540 exp, func,
3541 range[0], range[1], dstsize);
3542 return false;
3547 /* Check for reading past the end of SRC. */
3548 if (slen
3549 && slen == srcstr
3550 && dstwrite && range[0]
3551 && tree_int_cst_lt (slen, range[0]))
3553 if (TREE_NO_WARNING (exp))
3554 return false;
3556 location_t loc = tree_nonartificial_location (exp);
3558 if (tree_int_cst_equal (range[0], range[1]))
3559 warning_n (loc, opt, tree_to_uhwi (range[0]),
3560 "%K%qD reading %E byte from a region of size %E",
3561 "%K%qD reading %E bytes from a region of size %E",
3562 exp, func, range[0], slen);
3563 else if (tree_int_cst_sign_bit (range[1]))
3565 /* Avoid printing the upper bound if it's invalid. */
3566 warning_at (loc, opt,
3567 "%K%qD reading %E or more bytes from a region "
3568 "of size %E",
3569 exp, func, range[0], slen);
3571 else
3572 warning_at (loc, opt,
3573 "%K%qD reading between %E and %E bytes from a region "
3574 "of size %E",
3575 exp, func, range[0], range[1], slen);
3576 return false;
3579 return true;
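/* For illustration, a hypothetical call such as

     char d[4];
     strcpy (d, "abcde");   // writes 6 bytes into a 4-byte object

   would be flagged by the checks above with a -Wstringop-overflow
   warning along the lines of "writing 6 bytes into a region of size 4
   overflows the destination". */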
3582 /* Helper to compute the size of the object referenced by the DEST
3583 expression which must have pointer type, using Object Size type
3584 OSTYPE (only the least significant 2 bits are used). Return
3585 an estimate of the size of the object if successful or NULL when
3586 the size cannot be determined. When the referenced object involves
3587 a non-constant offset in some range the returned value represents
3588 the largest size given the smallest non-negative offset in the
3589 range. The function is intended for diagnostics and should not
3590 be used to influence code generation or optimization. */
3592 tree
3593 compute_objsize (tree dest, int ostype)
3595 unsigned HOST_WIDE_INT size;
3597 /* Only the two least significant bits are meaningful. */
3598 ostype &= 3;
3600 if (compute_builtin_object_size (dest, ostype, &size))
3601 return build_int_cst (sizetype, size);
3603 if (TREE_CODE (dest) == SSA_NAME)
3605 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3606 if (!is_gimple_assign (stmt))
3607 return NULL_TREE;
3609 dest = gimple_assign_rhs1 (stmt);
3611 tree_code code = gimple_assign_rhs_code (stmt);
3612 if (code == POINTER_PLUS_EXPR)
3614 /* compute_builtin_object_size fails for addresses with
3615 non-constant offsets. Try to determine the range of
3616 such an offset here and use it to adjust the constant
3617 size. */
3618 tree off = gimple_assign_rhs2 (stmt);
3619 if (TREE_CODE (off) == INTEGER_CST)
3621 if (tree size = compute_objsize (dest, ostype))
3623 wide_int wioff = wi::to_wide (off);
3624 wide_int wisiz = wi::to_wide (size);
3626 /* Ignore negative offsets for now. For others,
3627 use the lower bound as the most optimistic
3628 estimate of the (remaining) size. */
3629 if (wi::sign_mask (wioff))
3631 else if (wi::ltu_p (wioff, wisiz))
3632 return wide_int_to_tree (TREE_TYPE (size),
3633 wi::sub (wisiz, wioff));
3634 else
3635 return size_zero_node;
3638 else if (TREE_CODE (off) == SSA_NAME
3639 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3641 wide_int min, max;
3642 enum value_range_kind rng = get_range_info (off, &min, &max);
3644 if (rng == VR_RANGE)
3646 if (tree size = compute_objsize (dest, ostype))
3648 wide_int wisiz = wi::to_wide (size);
3650 /* Ignore negative offsets for now. For others,
3651 use the lower bound as the most optimistic
3652 estimate of the (remaining) size. */
3653 if (wi::sign_mask (min)
3654 || wi::sign_mask (max))
3656 else if (wi::ltu_p (min, wisiz))
3657 return wide_int_to_tree (TREE_TYPE (size),
3658 wi::sub (wisiz, min));
3659 else
3660 return size_zero_node;
3665 else if (code != ADDR_EXPR)
3666 return NULL_TREE;
3669 /* Unless computing the largest size (for memcpy and other raw memory
3670 functions), try to determine the size of the object from its type. */
3671 if (!ostype)
3672 return NULL_TREE;
3674 if (TREE_CODE (dest) != ADDR_EXPR)
3675 return NULL_TREE;
3677 tree type = TREE_TYPE (dest);
3678 if (TREE_CODE (type) == POINTER_TYPE)
3679 type = TREE_TYPE (type);
3681 type = TYPE_MAIN_VARIANT (type);
3683 if (TREE_CODE (type) == ARRAY_TYPE
3684 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3686 /* Return the constant size unless it's zero (that's a zero-length
3687 array likely at the end of a struct). */
3688 tree size = TYPE_SIZE_UNIT (type);
3689 if (size && TREE_CODE (size) == INTEGER_CST
3690 && !integer_zerop (size))
3691 return size;
3694 return NULL_TREE;
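/* For illustration, a hypothetical example: given

     char a[8];
     char *p = a + i;   // i known to be in the range [2, 5]

   compute_objsize (p, 0) uses the smallest non-negative offset in the
   range (2) and returns 6, the most optimistic estimate of the space
   remaining past P. */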
3697 /* Helper to determine and check the sizes of the source and the destination
3698 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3699 call expression, DEST is the destination argument, SRC is the source
3700 argument or null, and LEN is the number of bytes. Use Object Size type-0
3701 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3702 (no overflow or invalid sizes), false otherwise. */
3704 static bool
3705 check_memop_access (tree exp, tree dest, tree src, tree size)
3707 /* For functions like memset and memcpy that operate on raw memory
3708 try to determine the size of the largest source and destination
3709 object using type-0 Object Size regardless of the object size
3710 type specified by the option. */
3711 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3712 tree dstsize = compute_objsize (dest, 0);
3714 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3715 srcsize, dstsize);
3718 /* Validate memchr arguments without performing any expansion.
3719 Return NULL_RTX. */
3721 static rtx
3722 expand_builtin_memchr (tree exp, rtx)
3724 if (!validate_arglist (exp,
3725 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3726 return NULL_RTX;
3728 tree arg1 = CALL_EXPR_ARG (exp, 0);
3729 tree len = CALL_EXPR_ARG (exp, 2);
3731 /* Diagnose calls where the specified length exceeds the size
3732 of the object. */
3733 if (warn_stringop_overflow)
3735 tree size = compute_objsize (arg1, 0);
3736 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3737 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3740 return NULL_RTX;
3743 /* Expand a call EXP to the memcpy builtin.
3744 Return NULL_RTX if we failed; the caller should emit a normal call,
3745 otherwise try to get the result in TARGET, if convenient (and in
3746 mode MODE if that's convenient). */
3748 static rtx
3749 expand_builtin_memcpy (tree exp, rtx target)
3751 if (!validate_arglist (exp,
3752 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3753 return NULL_RTX;
3755 tree dest = CALL_EXPR_ARG (exp, 0);
3756 tree src = CALL_EXPR_ARG (exp, 1);
3757 tree len = CALL_EXPR_ARG (exp, 2);
3759 check_memop_access (exp, dest, src, len);
3761 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3762 /*retmode=*/ RETURN_BEGIN);
3765 /* Check a call EXP to the memmove built-in for validity.
3766 Return NULL_RTX on both success and failure. */
3768 static rtx
3769 expand_builtin_memmove (tree exp, rtx)
3771 if (!validate_arglist (exp,
3772 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3773 return NULL_RTX;
3775 tree dest = CALL_EXPR_ARG (exp, 0);
3776 tree src = CALL_EXPR_ARG (exp, 1);
3777 tree len = CALL_EXPR_ARG (exp, 2);
3779 check_memop_access (exp, dest, src, len);
3781 return NULL_RTX;
3784 /* Expand a call EXP to the mempcpy builtin.
3785 Return NULL_RTX if we failed; the caller should emit a normal call,
3786 otherwise try to get the result in TARGET, if convenient (and in
3787 mode MODE if that's convenient). */
3789 static rtx
3790 expand_builtin_mempcpy (tree exp, rtx target)
3792 if (!validate_arglist (exp,
3793 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3794 return NULL_RTX;
3796 tree dest = CALL_EXPR_ARG (exp, 0);
3797 tree src = CALL_EXPR_ARG (exp, 1);
3798 tree len = CALL_EXPR_ARG (exp, 2);
3800 /* Policy does not generally allow using compute_objsize (which
3801 is used internally by check_memop_access) to change code generation
3802 or drive optimization decisions.
3804 In this instance it is safe because the code we generate has
3805 the same semantics regardless of the return value of
3806 check_memop_access. Exactly the same amount of data is copied
3807 and the return value is exactly the same in both cases.
3809 Furthermore, check_memop_access always uses mode 0 for the call to
3810 compute_objsize, so the imprecise nature of compute_objsize is
3811 avoided. */
3813 /* Avoid expanding mempcpy into memcpy when the call is determined
3814 to overflow the buffer. This also prevents the same overflow
3815 from being diagnosed again when expanding memcpy. */
3816 if (!check_memop_access (exp, dest, src, len))
3817 return NULL_RTX;
3819 return expand_builtin_mempcpy_args (dest, src, len,
3820 target, exp, /*retmode=*/ RETURN_END);
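/* For illustration: mempcpy (d, s, n) copies exactly the same bytes as
   memcpy (d, s, n); the only difference is the return value, d + n
   (RETURN_END) rather than d (RETURN_BEGIN), which is why both are
   funneled through expand_builtin_memory_copy_args below. */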
3823 /* Helper function to do the actual work for expanding the memory copy
3824 family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3825 LEN bytes of memory from SRC to DEST and assigns the result to TARGET
3826 if convenient. The return value is based on the RETMODE argument. */
3828 static rtx
3829 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3830 rtx target, tree exp, memop_ret retmode)
3832 const char *src_str;
3833 unsigned int src_align = get_pointer_alignment (src);
3834 unsigned int dest_align = get_pointer_alignment (dest);
3835 rtx dest_mem, src_mem, dest_addr, len_rtx;
3836 HOST_WIDE_INT expected_size = -1;
3837 unsigned int expected_align = 0;
3838 unsigned HOST_WIDE_INT min_size;
3839 unsigned HOST_WIDE_INT max_size;
3840 unsigned HOST_WIDE_INT probable_max_size;
3842 bool is_move_done;
3844 /* If DEST is not a pointer type, call the normal function. */
3845 if (dest_align == 0)
3846 return NULL_RTX;
3848 /* If SRC is not a pointer type, don't do this
3849 operation in-line. */
3850 if (src_align == 0)
3851 return NULL_RTX;
3853 if (currently_expanding_gimple_stmt)
3854 stringop_block_profile (currently_expanding_gimple_stmt,
3855 &expected_align, &expected_size);
3857 if (expected_align < dest_align)
3858 expected_align = dest_align;
3859 dest_mem = get_memory_rtx (dest, len);
3860 set_mem_align (dest_mem, dest_align);
3861 len_rtx = expand_normal (len);
3862 determine_block_size (len, len_rtx, &min_size, &max_size,
3863 &probable_max_size);
3864 src_str = c_getstr (src);
3866 /* If SRC is a string constant and block move would be done
3867 by pieces, we can avoid loading the string from memory
3868 and only store the computed constants. */
3869 if (src_str
3870 && CONST_INT_P (len_rtx)
3871 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3872 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3873 CONST_CAST (char *, src_str),
3874 dest_align, false))
3876 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3877 builtin_memcpy_read_str,
3878 CONST_CAST (char *, src_str),
3879 dest_align, false, retmode);
3880 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3881 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3882 return dest_mem;
3885 src_mem = get_memory_rtx (src, len);
3886 set_mem_align (src_mem, src_align);
3888 /* Copy the bulk of the data in the most expedient way. */
3889 enum block_op_methods method = BLOCK_OP_NORMAL;
3890 if (CALL_EXPR_TAILCALL (exp)
3891 && (retmode == RETURN_BEGIN || target == const0_rtx))
3892 method = BLOCK_OP_TAILCALL;
3893 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3894 && retmode == RETURN_END
3895 && target != const0_rtx);
3896 if (use_mempcpy_call)
3897 method = BLOCK_OP_NO_LIBCALL_RET;
3898 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3899 expected_align, expected_size,
3900 min_size, max_size, probable_max_size,
3901 use_mempcpy_call, &is_move_done);
3903 /* Bail out when a mempcpy call would be expanded as a libcall and
3904 the target provides a fast implementation of the mempcpy
3905 routine. */
3906 if (!is_move_done)
3907 return NULL_RTX;
3909 if (dest_addr == pc_rtx)
3910 return NULL_RTX;
3912 if (dest_addr == 0)
3914 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3915 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3918 if (retmode != RETURN_BEGIN && target != const0_rtx)
3920 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3921 /* stpcpy returns a pointer to the last byte written, so back up one. */
3922 if (retmode == RETURN_END_MINUS_ONE)
3923 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3926 return dest_addr;
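/* For illustration, a sketch of the store_by_pieces path above,
   assuming a little-endian target: a call such as

     memcpy (d, "hi", 3);

   need not load from the string constant at all; it can be expanded
   as a 16-bit store of 0x6968 (the bytes 'h' and 'i') followed by a
   byte store of 0 for the terminating nul. */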
3929 static rtx
3930 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3931 rtx target, tree orig_exp, memop_ret retmode)
3933 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3934 retmode);
3937 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3938 we failed; the caller should emit a normal call, otherwise try to
3939 get the result in TARGET, if convenient.
3940 Return value is based on RETMODE argument. */
3942 static rtx
3943 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3945 struct expand_operand ops[3];
3946 rtx dest_mem;
3947 rtx src_mem;
3949 if (!targetm.have_movstr ())
3950 return NULL_RTX;
3952 dest_mem = get_memory_rtx (dest, NULL);
3953 src_mem = get_memory_rtx (src, NULL);
3954 if (retmode == RETURN_BEGIN)
3956 target = force_reg (Pmode, XEXP (dest_mem, 0));
3957 dest_mem = replace_equiv_address (dest_mem, target);
3960 create_output_operand (&ops[0],
3961 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3962 create_fixed_operand (&ops[1], dest_mem);
3963 create_fixed_operand (&ops[2], src_mem);
3964 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3965 return NULL_RTX;
3967 if (retmode != RETURN_BEGIN && target != const0_rtx)
3969 target = ops[0].value;
3970 /* movstr is supposed to set end to the address of the NUL
3971 terminator. If the caller requested a mempcpy-like return value,
3972 adjust it. */
3973 if (retmode == RETURN_END)
3975 rtx tem = plus_constant (GET_MODE (target),
3976 gen_lowpart (GET_MODE (target), target), 1);
3977 emit_move_insn (target, force_operand (tem, NULL_RTX));
3980 return target;
3983 /* Do some very basic size validation of a call to the strcat builtin
3984 given by EXP. Return NULL_RTX to have the built-in expand to a call
3985 to the library function. */
3987 static rtx
3988 expand_builtin_strcat (tree exp, rtx)
3990 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3991 || !warn_stringop_overflow)
3992 return NULL_RTX;
3994 tree dest = CALL_EXPR_ARG (exp, 0);
3995 tree src = CALL_EXPR_ARG (exp, 1);
3997 /* There is no way here to determine the length of the string in
3998 the destination to which the SRC string is being appended so
3999 just diagnose cases when the source string is longer than
4000 the destination object. */
4002 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4004 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4005 destsize);
4007 return NULL_RTX;
4010 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4011 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4012 try to get the result in TARGET, if convenient (and in mode MODE if that's
4013 convenient). */
4015 static rtx
4016 expand_builtin_strcpy (tree exp, rtx target)
4018 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4019 return NULL_RTX;
4021 tree dest = CALL_EXPR_ARG (exp, 0);
4022 tree src = CALL_EXPR_ARG (exp, 1);
4024 if (warn_stringop_overflow)
4026 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4027 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4028 src, destsize);
4031 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4033 /* Check to see if the argument was declared attribute nonstring
4034 and if so, issue a warning since at this point it's not known
4035 to be nul-terminated. */
4036 tree fndecl = get_callee_fndecl (exp);
4037 maybe_warn_nonstring_arg (fndecl, exp);
4038 return ret;
4041 return NULL_RTX;
4044 /* Helper function to do the actual work for expand_builtin_strcpy. The
4045 arguments to the builtin_strcpy call DEST and SRC are broken out
4046 so that this can also be called without constructing an actual CALL_EXPR.
4047 The other arguments and return value are the same as for
4048 expand_builtin_strcpy. */
4050 static rtx
4051 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4053 /* Detect strcpy calls with unterminated arrays. */
4054 if (tree nonstr = unterminated_array (src))
4056 /* NONSTR refers to the non-nul terminated constant array. */
4057 if (!TREE_NO_WARNING (exp))
4058 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4059 return NULL_RTX;
4062 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4065 /* Expand a call EXP to the stpcpy builtin.
4066 Return NULL_RTX if we failed; the caller should emit a normal call,
4067 otherwise try to get the result in TARGET, if convenient (and in
4068 mode MODE if that's convenient). */
4070 static rtx
4071 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4073 tree dst, src;
4074 location_t loc = EXPR_LOCATION (exp);
4076 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4077 return NULL_RTX;
4079 dst = CALL_EXPR_ARG (exp, 0);
4080 src = CALL_EXPR_ARG (exp, 1);
4082 if (warn_stringop_overflow)
4084 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4085 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4086 src, destsize);
4089 /* If the return value is ignored, transform stpcpy into strcpy. */
4090 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4092 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4093 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4094 return expand_expr (result, target, mode, EXPAND_NORMAL);
4096 else
4098 tree len, lenp1;
4099 rtx ret;
4101 /* Ensure we get an actual string whose length can be evaluated at
4102 compile-time, not an expression containing a string. This is
4103 because the latter will potentially produce pessimized code
4104 when used to produce the return value. */
4105 c_strlen_data lendata = { };
4106 if (!c_getstr (src, NULL)
4107 || !(len = c_strlen (src, 0, &lendata, 1)))
4108 return expand_movstr (dst, src, target,
4109 /*retmode=*/ RETURN_END_MINUS_ONE);
4111 if (lendata.decl && !TREE_NO_WARNING (exp))
4112 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4114 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4115 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4116 target, exp,
4117 /*retmode=*/ RETURN_END_MINUS_ONE);
4119 if (ret)
4120 return ret;
4122 if (TREE_CODE (len) == INTEGER_CST)
4124 rtx len_rtx = expand_normal (len);
4126 if (CONST_INT_P (len_rtx))
4128 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4130 if (ret)
4132 if (! target)
4134 if (mode != VOIDmode)
4135 target = gen_reg_rtx (mode);
4136 else
4137 target = gen_reg_rtx (GET_MODE (ret));
4139 if (GET_MODE (target) != GET_MODE (ret))
4140 ret = gen_lowpart (GET_MODE (target), ret);
4142 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4143 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4144 gcc_assert (ret);
4146 return target;
4151 return expand_movstr (dst, src, target,
4152 /*retmode=*/ RETURN_END_MINUS_ONE);
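/* For illustration: when the result of stpcpy (d, s) is unused, the
   code above expands it as strcpy (d, s); with a constant source such
   as stpcpy (d, "abc"), the mempcpy path copies 4 bytes and the
   RETURN_END_MINUS_ONE adjustment yields d + 3, the address of the
   terminating nul, as stpcpy requires. */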
4156 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4157 arguments while being careful to avoid duplicate warnings (which could
4158 be issued if the expander were to expand the call, resulting in it
4159 being emitted in expand_call ()). */
4161 static rtx
4162 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4164 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4166 /* The call has been successfully expanded. Check for nonstring
4167 arguments and issue warnings as appropriate. */
4168 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4169 return ret;
4172 return NULL_RTX;
4175 /* Check a call EXP to the stpncpy built-in for validity.
4176 Return NULL_RTX on both success and failure. */
4178 static rtx
4179 expand_builtin_stpncpy (tree exp, rtx)
4181 if (!validate_arglist (exp,
4182 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4183 || !warn_stringop_overflow)
4184 return NULL_RTX;
4186 /* The source and destination of the call. */
4187 tree dest = CALL_EXPR_ARG (exp, 0);
4188 tree src = CALL_EXPR_ARG (exp, 1);
4190 /* The exact number of bytes to write (not the maximum). */
4191 tree len = CALL_EXPR_ARG (exp, 2);
4193 /* The size of the destination object. */
4194 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4196 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4198 return NULL_RTX;
4201 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4202 bytes from constant string DATA + OFFSET and return it as target
4203 constant. */
4205 static rtx
4206 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4207 scalar_int_mode mode)
4209 const char *str = (const char *) data;
4211 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4212 return const0_rtx;
4214 return c_readstr (str + offset, mode);
4217 /* Helper to check the sizes of sequences and the destination of calls
4218 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4219 success (no overflow or invalid sizes), false otherwise. */
4221 static bool
4222 check_strncat_sizes (tree exp, tree objsize)
4224 tree dest = CALL_EXPR_ARG (exp, 0);
4225 tree src = CALL_EXPR_ARG (exp, 1);
4226 tree maxread = CALL_EXPR_ARG (exp, 2);
4228 /* Try to determine the range of lengths that the source expression
4229 refers to. */
4230 c_strlen_data lendata = { };
4231 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4233 /* Try to verify that the destination is big enough for the shortest
4234 string. */
4236 if (!objsize && warn_stringop_overflow)
4238 /* If it hasn't been provided by __strncat_chk, try to determine
4239 the size of the destination object into which the source is
4240 being copied. */
4241 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4244 /* Add one for the terminating nul. */
4245 tree srclen = (lendata.minlen
4246 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4247 size_one_node)
4248 : NULL_TREE);
4250 /* The strncat function copies at most MAXREAD bytes and always appends
4251 the terminating nul so the specified upper bound should never be equal
4252 to (or greater than) the size of the destination. */
4253 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4254 && tree_int_cst_equal (objsize, maxread))
4256 location_t loc = tree_nonartificial_location (exp);
4257 loc = expansion_point_location_if_in_system_header (loc);
4259 warning_at (loc, OPT_Wstringop_overflow_,
4260 "%K%qD specified bound %E equals destination size",
4261 exp, get_callee_fndecl (exp), maxread);
4263 return false;
4266 if (!srclen
4267 || (maxread && tree_fits_uhwi_p (maxread)
4268 && tree_fits_uhwi_p (srclen)
4269 && tree_int_cst_lt (maxread, srclen)))
4270 srclen = maxread;
4272 /* The exact number of bytes to write is not known here; check_access
4273 will check MAXREAD and SRCLEN against the destination size. */
4274 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4275 objsize);
4278 /* Similar to expand_builtin_strcat, do some very basic size validation
4279 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4280 the built-in expand to a call to the library function. */
4282 static rtx
4283 expand_builtin_strncat (tree exp, rtx)
4285 if (!validate_arglist (exp,
4286 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4287 || !warn_stringop_overflow)
4288 return NULL_RTX;
4290 tree dest = CALL_EXPR_ARG (exp, 0);
4291 tree src = CALL_EXPR_ARG (exp, 1);
4292 /* The upper bound on the number of bytes to write. */
4293 tree maxread = CALL_EXPR_ARG (exp, 2);
4294 /* The length of the source sequence. */
4295 tree slen = c_strlen (src, 1);
4297 /* Try to determine the range of lengths that the source expression
4298 refers to. Since the lengths are only used for warnings and not
4299 for code generation, disable strict mode below. */
4300 tree maxlen = slen;
4301 if (!maxlen)
4303 c_strlen_data lendata = { };
4304 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4305 maxlen = lendata.maxbound;
4308 /* Try to verify that the destination is big enough for the shortest
4309 string. First try to determine the size of the destination object
4310 into which the source is being copied. */
4311 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4313 /* Add one for the terminating nul. */
4314 tree srclen = (maxlen
4315 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4316 size_one_node)
4317 : NULL_TREE);
4319 /* The strncat function copies at most MAXREAD bytes and always appends
4320 the terminating nul so the specified upper bound should never be equal
4321 to (or greater than) the size of the destination. */
4322 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4323 && tree_int_cst_equal (destsize, maxread))
4325 location_t loc = tree_nonartificial_location (exp);
4326 loc = expansion_point_location_if_in_system_header (loc);
4328 warning_at (loc, OPT_Wstringop_overflow_,
4329 "%K%qD specified bound %E equals destination size",
4330 exp, get_callee_fndecl (exp), maxread);
4332 return NULL_RTX;
4335 if (!srclen
4336 || (maxread && tree_fits_uhwi_p (maxread)
4337 && tree_fits_uhwi_p (srclen)
4338 && tree_int_cst_lt (maxread, srclen)))
4339 srclen = maxread;
4341 /* The number of bytes to write is SRCLEN. */
4342 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4344 return NULL_RTX;
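/* For illustration, a hypothetical example: given char d[8];, the call

     strncat (d, s, sizeof d);   // bound equals destination size

   is diagnosed above because strncat always appends a terminating nul,
   so a bound equal to the destination size can overflow by one byte;
   the conventional safe bound is sizeof d - strlen (d) - 1. */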
4347 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4348 NULL_RTX if we failed; the caller should emit a normal call. */
4350 static rtx
4351 expand_builtin_strncpy (tree exp, rtx target)
4353 location_t loc = EXPR_LOCATION (exp);
4355 if (validate_arglist (exp,
4356 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4358 tree dest = CALL_EXPR_ARG (exp, 0);
4359 tree src = CALL_EXPR_ARG (exp, 1);
4360 /* The number of bytes to write (not the maximum). */
4361 tree len = CALL_EXPR_ARG (exp, 2);
4362 /* The length of the source sequence. */
4363 tree slen = c_strlen (src, 1);
4365 if (warn_stringop_overflow)
4367 tree destsize = compute_objsize (dest,
4368 warn_stringop_overflow - 1);
4370 /* The number of bytes to write is LEN but check_access will also
4371 check SLEN if LEN's value isn't known. */
4372 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4373 destsize);
4376 /* We must be passed a constant len and src parameter. */
4377 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4378 return NULL_RTX;
4380 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4382 /* We're required to pad with trailing zeros if the requested
4383 len is greater than strlen(s2)+1. In that case try to
4384 use store_by_pieces; if it fails, punt. */
4385 if (tree_int_cst_lt (slen, len))
4387 unsigned int dest_align = get_pointer_alignment (dest);
4388 const char *p = c_getstr (src);
4389 rtx dest_mem;
4391 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4392 || !can_store_by_pieces (tree_to_uhwi (len),
4393 builtin_strncpy_read_str,
4394 CONST_CAST (char *, p),
4395 dest_align, false))
4396 return NULL_RTX;
4398 dest_mem = get_memory_rtx (dest, len);
4399 store_by_pieces (dest_mem, tree_to_uhwi (len),
4400 builtin_strncpy_read_str,
4401 CONST_CAST (char *, p), dest_align, false,
4402 RETURN_BEGIN);
4403 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4404 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4405 return dest_mem;
4408 return NULL_RTX;
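/* For illustration: strncpy (d, "ab", 5) must write 'a', 'b', and
   three nul bytes of padding; with a constant length and source as
   here, the store_by_pieces path above emits those stores directly
   instead of calling the library function. */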
4411 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4412 bytes from constant string DATA + OFFSET and return it as target
4413 constant. */
4415 static rtx
4416 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4417 scalar_int_mode mode)
4419 const char *c = (const char *) data;
4420 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4422 memset (p, *c, GET_MODE_SIZE (mode));
4424 return c_readstr (p, mode);
4427 /* Callback routine for store_by_pieces. Return the RTL of a register
4428 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4429 char value given in the RTL register data. For example, if mode is
4430 4 bytes wide, return the RTL for 0x01010101*data. */
4432 static rtx
4433 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4434 scalar_int_mode mode)
4436 rtx target, coeff;
4437 size_t size;
4438 char *p;
4440 size = GET_MODE_SIZE (mode);
4441 if (size == 1)
4442 return (rtx) data;
4444 p = XALLOCAVEC (char, size);
4445 memset (p, 1, size);
4446 coeff = c_readstr (p, mode);
4448 target = convert_to_mode (mode, (rtx) data, 1);
4449 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4450 return force_reg (mode, target);
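/* For illustration: with a 4-byte MODE and a run-time fill value V,
   the multiplication above computes V * 0x01010101; e.g. V == 0xab
   yields 0xabababab, four copies of the byte ready to be stored one
   word at a time. */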
4453 /* Expand expression EXP, which is a call to the memset builtin. Return
4454 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4455 try to get the result in TARGET, if convenient (and in mode MODE if that's
4456 convenient). */
4458 static rtx
4459 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4461 if (!validate_arglist (exp,
4462 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4463 return NULL_RTX;
4465 tree dest = CALL_EXPR_ARG (exp, 0);
4466 tree val = CALL_EXPR_ARG (exp, 1);
4467 tree len = CALL_EXPR_ARG (exp, 2);
4469 check_memop_access (exp, dest, NULL_TREE, len);
4471 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4474 /* Helper function to do the actual work for expand_builtin_memset. The
4475 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4476 so that this can also be called without constructing an actual CALL_EXPR.
4477 The other arguments and return value are the same as for
4478 expand_builtin_memset. */
4480 static rtx
4481 expand_builtin_memset_args (tree dest, tree val, tree len,
4482 rtx target, machine_mode mode, tree orig_exp)
4484 tree fndecl, fn;
4485 enum built_in_function fcode;
4486 machine_mode val_mode;
4487 char c;
4488 unsigned int dest_align;
4489 rtx dest_mem, dest_addr, len_rtx;
4490 HOST_WIDE_INT expected_size = -1;
4491 unsigned int expected_align = 0;
4492 unsigned HOST_WIDE_INT min_size;
4493 unsigned HOST_WIDE_INT max_size;
4494 unsigned HOST_WIDE_INT probable_max_size;
4496 dest_align = get_pointer_alignment (dest);
4498 /* If DEST is not a pointer type, don't do this operation in-line. */
4499 if (dest_align == 0)
4500 return NULL_RTX;
4502 if (currently_expanding_gimple_stmt)
4503 stringop_block_profile (currently_expanding_gimple_stmt,
4504 &expected_align, &expected_size);
4506 if (expected_align < dest_align)
4507 expected_align = dest_align;
4509 /* If the LEN parameter is zero, return DEST. */
4510 if (integer_zerop (len))
4512 /* Evaluate and ignore VAL in case it has side-effects. */
4513 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4514 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4517 /* Stabilize the arguments in case we fail. */
4518 dest = builtin_save_expr (dest);
4519 val = builtin_save_expr (val);
4520 len = builtin_save_expr (len);
4522 len_rtx = expand_normal (len);
4523 determine_block_size (len, len_rtx, &min_size, &max_size,
4524 &probable_max_size);
4525 dest_mem = get_memory_rtx (dest, len);
4526 val_mode = TYPE_MODE (unsigned_char_type_node);
4528 if (TREE_CODE (val) != INTEGER_CST)
4530 rtx val_rtx;
4532 val_rtx = expand_normal (val);
4533 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4535 /* Assume that we can memset by pieces if we can store
4536 the coefficients by pieces (in the required modes).
4537 We can't pass builtin_memset_gen_str as that emits RTL. */
4538 c = 1;
4539 if (tree_fits_uhwi_p (len)
4540 && can_store_by_pieces (tree_to_uhwi (len),
4541 builtin_memset_read_str, &c, dest_align,
4542 true))
4544 val_rtx = force_reg (val_mode, val_rtx);
4545 store_by_pieces (dest_mem, tree_to_uhwi (len),
4546 builtin_memset_gen_str, val_rtx, dest_align,
4547 true, RETURN_BEGIN);
4549 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4550 dest_align, expected_align,
4551 expected_size, min_size, max_size,
4552 probable_max_size))
4553 goto do_libcall;
4555 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4556 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4557 return dest_mem;
4560 if (target_char_cast (val, &c))
4561 goto do_libcall;
4563 if (c)
4565 if (tree_fits_uhwi_p (len)
4566 && can_store_by_pieces (tree_to_uhwi (len),
4567 builtin_memset_read_str, &c, dest_align,
4568 true))
4569 store_by_pieces (dest_mem, tree_to_uhwi (len),
4570 builtin_memset_read_str, &c, dest_align, true,
4571 RETURN_BEGIN);
4572 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4573 gen_int_mode (c, val_mode),
4574 dest_align, expected_align,
4575 expected_size, min_size, max_size,
4576 probable_max_size))
4577 goto do_libcall;
4579 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4580 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4581 return dest_mem;
4584 set_mem_align (dest_mem, dest_align);
4585 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4586 CALL_EXPR_TAILCALL (orig_exp)
4587 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4588 expected_align, expected_size,
4589 min_size, max_size,
4590 probable_max_size);
4592 if (dest_addr == 0)
4594 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4595 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4598 return dest_addr;
4600 do_libcall:
4601 fndecl = get_callee_fndecl (orig_exp);
4602 fcode = DECL_FUNCTION_CODE (fndecl);
4603 if (fcode == BUILT_IN_MEMSET)
4604 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4605 dest, val, len);
4606 else if (fcode == BUILT_IN_BZERO)
4607 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4608 dest, len);
4609 else
4610 gcc_unreachable ();
4611 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4612 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4613 return expand_call (fn, target, target == const0_rtx);
4616 /* Expand expression EXP, which is a call to the bzero builtin. Return
4617 NULL_RTX if we failed; the caller should emit a normal call. */
4619 static rtx
4620 expand_builtin_bzero (tree exp)
4622 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4623 return NULL_RTX;
4625 tree dest = CALL_EXPR_ARG (exp, 0);
4626 tree size = CALL_EXPR_ARG (exp, 1);
4628 check_memop_access (exp, dest, NULL_TREE, size);
4630 /* New argument list transforming bzero(ptr x, int y) to
4631 memset(ptr x, int 0, size_t y). This is done this way
4632 so that if it isn't expanded inline, we fall back to
4633 calling bzero instead of memset. */
4635 location_t loc = EXPR_LOCATION (exp);
4637 return expand_builtin_memset_args (dest, integer_zero_node,
4638 fold_convert_loc (loc,
4639 size_type_node, size),
4640 const0_rtx, VOIDmode, exp);
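/* For illustration: bzero (p, n) is expanded above exactly as
   memset (p, 0, (size_t) n) would be; the original call expression
   EXP is passed along so that if inline expansion fails, the fallback
   library call is still bzero rather than memset. */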
4643 /* Try to expand cmpstr operation ICODE with the given operands.
4644 Return the result rtx on success, otherwise return null. */
4646 static rtx
4647 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4648 HOST_WIDE_INT align)
4650 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4652 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4653 target = NULL_RTX;
4655 struct expand_operand ops[4];
4656 create_output_operand (&ops[0], target, insn_mode);
4657 create_fixed_operand (&ops[1], arg1_rtx);
4658 create_fixed_operand (&ops[2], arg2_rtx);
4659 create_integer_operand (&ops[3], align);
4660 if (maybe_expand_insn (icode, 4, ops))
4661 return ops[0].value;
4662 return NULL_RTX;
4665 /* Expand expression EXP, which is a call to the memcmp built-in function.
4666 Return NULL_RTX if we failed and the caller should emit a normal call,
4667 otherwise try to get the result in TARGET, if convenient.
4668 RESULT_EQ is true if we can relax the returned value to be either zero
4669 or nonzero, without caring about the sign. */
4671 static rtx
4672 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4674 if (!validate_arglist (exp,
4675 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4676 return NULL_RTX;
4678 tree arg1 = CALL_EXPR_ARG (exp, 0);
4679 tree arg2 = CALL_EXPR_ARG (exp, 1);
4680 tree len = CALL_EXPR_ARG (exp, 2);
4681 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4682 bool no_overflow = true;
4684 /* Diagnose calls where the specified length exceeds the size of either
4685 object. */
4686 tree size = compute_objsize (arg1, 0);
4687 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4688 len, /*maxread=*/NULL_TREE, size,
4689 /*objsize=*/NULL_TREE);
4690 if (no_overflow)
4692 size = compute_objsize (arg2, 0);
4693 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4694 len, /*maxread=*/NULL_TREE, size,
4695 /*objsize=*/NULL_TREE);
4698 /* If the specified length exceeds the size of either object,
4699 call the function. */
4700 if (!no_overflow)
4701 return NULL_RTX;
4703 /* Due to the performance benefit, always inline the calls first
4704 when result_eq is false. */
4705 rtx result = NULL_RTX;
4707 if (!result_eq && fcode != BUILT_IN_BCMP)
4709 result = inline_expand_builtin_string_cmp (exp, target);
4710 if (result)
4711 return result;
4714 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4715 location_t loc = EXPR_LOCATION (exp);
4717 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4718 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4720 /* If we don't have POINTER_TYPE, call the function. */
4721 if (arg1_align == 0 || arg2_align == 0)
4722 return NULL_RTX;
4724 rtx arg1_rtx = get_memory_rtx (arg1, len);
4725 rtx arg2_rtx = get_memory_rtx (arg2, len);
4726 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4728 /* Set MEM_SIZE as appropriate. */
4729 if (CONST_INT_P (len_rtx))
4731 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4732 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4735 by_pieces_constfn constfn = NULL;
4737 const char *src_str = c_getstr (arg2);
4738 if (result_eq && src_str == NULL)
4740 src_str = c_getstr (arg1);
4741 if (src_str != NULL)
4742 std::swap (arg1_rtx, arg2_rtx);
4745 /* If SRC is a string constant and the block comparison would be done
4746 by pieces, we can avoid loading the string from memory
4747 and only use the computed constants. */
4748 if (src_str
4749 && CONST_INT_P (len_rtx)
4750 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4751 constfn = builtin_memcpy_read_str;
4753 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4754 TREE_TYPE (len), target,
4755 result_eq, constfn,
4756 CONST_CAST (char *, src_str));
4758 if (result)
4760 /* Return the value in the proper mode for this function. */
4761 if (GET_MODE (result) == mode)
4762 return result;
4764 if (target != 0)
4766 convert_move (target, result, 0);
4767 return target;
4770 return convert_to_mode (mode, result, 0);
4773 return NULL_RTX;
4776 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4777 if we failed; the caller should emit a normal call, otherwise try to get
4778 the result in TARGET, if convenient. */
4780 static rtx
4781 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4783 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4784 return NULL_RTX;
4786 /* Due to the performance benefit, always inline the calls first. */
4787 rtx result = NULL_RTX;
4788 result = inline_expand_builtin_string_cmp (exp, target);
4789 if (result)
4790 return result;
4792 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4793 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4794 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4795 return NULL_RTX;
4797 tree arg1 = CALL_EXPR_ARG (exp, 0);
4798 tree arg2 = CALL_EXPR_ARG (exp, 1);
4800 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4801 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4803 /* If we don't have POINTER_TYPE, call the function. */
4804 if (arg1_align == 0 || arg2_align == 0)
4805 return NULL_RTX;
4807 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4808 arg1 = builtin_save_expr (arg1);
4809 arg2 = builtin_save_expr (arg2);
4811 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4812 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4814 /* Try to call cmpstrsi. */
4815 if (cmpstr_icode != CODE_FOR_nothing)
4816 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4817 MIN (arg1_align, arg2_align));
4819 /* Try to determine at least one length and call cmpstrnsi. */
4820 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4822 tree len;
4823 rtx arg3_rtx;
4825 tree len1 = c_strlen (arg1, 1);
4826 tree len2 = c_strlen (arg2, 1);
4828 if (len1)
4829 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4830 if (len2)
4831 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4833 /* If we don't have a constant length for the first, use the length
4834 of the second, if we know it. We don't require a constant for
4835 this case; some cost analysis could be done if both are available
4836 but neither is constant. For now, assume they're equally cheap,
4837 unless one has side effects. If both strings have constant lengths,
4838 use the smaller. */
4840 if (!len1)
4841 len = len2;
4842 else if (!len2)
4843 len = len1;
4844 else if (TREE_SIDE_EFFECTS (len1))
4845 len = len2;
4846 else if (TREE_SIDE_EFFECTS (len2))
4847 len = len1;
4848 else if (TREE_CODE (len1) != INTEGER_CST)
4849 len = len2;
4850 else if (TREE_CODE (len2) != INTEGER_CST)
4851 len = len1;
4852 else if (tree_int_cst_lt (len1, len2))
4853 len = len1;
4854 else
4855 len = len2;
4857 /* If both arguments have side effects, we cannot optimize. */
4858 if (len && !TREE_SIDE_EFFECTS (len))
4860 arg3_rtx = expand_normal (len);
4861 result = expand_cmpstrn_or_cmpmem
4862 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4863 arg3_rtx, MIN (arg1_align, arg2_align));
4867 tree fndecl = get_callee_fndecl (exp);
4868 if (result)
4870 /* Check to see if the argument was declared attribute nonstring
4871 and if so, issue a warning since at this point it's not known
4872 to be nul-terminated. */
4873 maybe_warn_nonstring_arg (fndecl, exp);
4875 /* Return the value in the proper mode for this function. */
4876 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4877 if (GET_MODE (result) == mode)
4878 return result;
4879 if (target == 0)
4880 return convert_to_mode (mode, result, 0);
4881 convert_move (target, result, 0);
4882 return target;
4885 /* Expand the library call ourselves using a stabilized argument
4886 list to avoid re-evaluating the function's arguments twice. */
4887 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4888 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4889 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4890 return expand_call (fn, target, target == const0_rtx);
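/* For illustration: in strcmp (s, "hi") only the second length is
   known, so the cmpstrn path above can compare with length 3
   (strlen ("hi") + 1); any difference between the strings must appear
   at or before the terminating nul of the constant. */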
4893 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4894 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4895 the result in TARGET, if convenient. */
4897 static rtx
4898 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4899 ATTRIBUTE_UNUSED machine_mode mode)
4901 if (!validate_arglist (exp,
4902 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4903 return NULL_RTX;
4905 /* Due to the performance benefit, always inline the calls first. */
4906 rtx result = NULL_RTX;
4907 result = inline_expand_builtin_string_cmp (exp, target);
4908 if (result)
4909 return result;
4911 /* If c_strlen can determine an expression for one of the string
4912 lengths, and it doesn't have side effects, then emit cmpstrnsi
4913 using length MIN(strlen(string)+1, arg3). */
4914 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4915 if (cmpstrn_icode == CODE_FOR_nothing)
4916 return NULL_RTX;
4918 tree len;
4920 tree arg1 = CALL_EXPR_ARG (exp, 0);
4921 tree arg2 = CALL_EXPR_ARG (exp, 1);
4922 tree arg3 = CALL_EXPR_ARG (exp, 2);
4924 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4925 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4927 tree len1 = c_strlen (arg1, 1);
4928 tree len2 = c_strlen (arg2, 1);
4930 location_t loc = EXPR_LOCATION (exp);
4932 if (len1)
4933 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4934 if (len2)
4935 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4937 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4939 /* If we don't have a constant length for the first, use the length
4940 of the second, if we know it. If neither string is constant length,
4941 use the given length argument. We don't require a constant for
4942 this case; some cost analysis could be done if both are available
4943 but neither is constant. For now, assume they're equally cheap,
4944 unless one has side effects. If both strings have constant lengths,
4945 use the smaller. */
4947 if (!len1 && !len2)
4948 len = len3;
4949 else if (!len1)
4950 len = len2;
4951 else if (!len2)
4952 len = len1;
4953 else if (TREE_SIDE_EFFECTS (len1))
4954 len = len2;
4955 else if (TREE_SIDE_EFFECTS (len2))
4956 len = len1;
4957 else if (TREE_CODE (len1) != INTEGER_CST)
4958 len = len2;
4959 else if (TREE_CODE (len2) != INTEGER_CST)
4960 len = len1;
4961 else if (tree_int_cst_lt (len1, len2))
4962 len = len1;
4963 else
4964 len = len2;
4966 /* If we are not using the given length, we must incorporate it here.
4967 The actual new length parameter will be MIN(len,arg3) in this case. */
4968 if (len != len3)
4970 len = fold_convert_loc (loc, sizetype, len);
4971 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4973 rtx arg1_rtx = get_memory_rtx (arg1, len);
4974 rtx arg2_rtx = get_memory_rtx (arg2, len);
4975 rtx arg3_rtx = expand_normal (len);
4976 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4977 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4978 MIN (arg1_align, arg2_align));
4980 tree fndecl = get_callee_fndecl (exp);
4981 if (result)
4983 /* Check to see if the argument was declared attribute nonstring
4984 and if so, issue a warning since at this point it's not known
4985 to be nul-terminated. */
4986 maybe_warn_nonstring_arg (fndecl, exp);
4988 /* Return the value in the proper mode for this function. */
4989 mode = TYPE_MODE (TREE_TYPE (exp));
4990 if (GET_MODE (result) == mode)
4991 return result;
4992 if (target == 0)
4993 return convert_to_mode (mode, result, 0);
4994 convert_move (target, result, 0);
4995 return target;
4998 /* Expand the library call ourselves using a stabilized argument
4999 list to avoid re-evaluating the function's arguments twice. */
5000 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5001 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5002 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5003 return expand_call (fn, target, target == const0_rtx);
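/* For illustration: for strncmp (s, "abc", n) the code above passes
   MIN (4, n) to cmpstrnsi, where 4 is strlen ("abc") + 1; comparing
   past the terminating nul of the constant cannot change the
   result. */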
5006 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5007 if that's convenient. */
5009 rtx
5010 expand_builtin_saveregs (void)
5012 rtx val;
5013 rtx_insn *seq;
5015 /* Don't do __builtin_saveregs more than once in a function.
5016 Save the result of the first call and reuse it. */
5017 if (saveregs_value != 0)
5018 return saveregs_value;
5020 /* When this function is called, it means that registers must be
5021 saved on entry to this function. So we migrate the call to the
5022 first insn of this function. */
5024 start_sequence ();
5026 /* Do whatever the machine needs done in this case. */
5027 val = targetm.calls.expand_builtin_saveregs ();
5029 seq = get_insns ();
5030 end_sequence ();
5032 saveregs_value = val;
5034 /* Put the insns after the NOTE that starts the function. If this
5035 is inside a start_sequence, make the outer-level insn chain current, so
5036 the code is placed at the start of the function. */
5037 push_topmost_sequence ();
5038 emit_insn_after (seq, entry_of_function ());
5039 pop_topmost_sequence ();
5041 return val;
5044 /* Expand a call to __builtin_next_arg. */
5046 static rtx
5047 expand_builtin_next_arg (void)
5049 /* Checking arguments is already done in fold_builtin_next_arg
5050 that must be called before this function. */
5051 return expand_binop (ptr_mode, add_optab,
5052 crtl->args.internal_arg_pointer,
5053 crtl->args.arg_offset_rtx,
5054 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5057 /* Make it easier for the backends by protecting the valist argument
5058 from multiple evaluations. */
5060 static tree
5061 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5063 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5065 /* The current way of determining the type of valist is completely
5066 bogus. We should have the information on the va builtin instead. */
5067 if (!vatype)
5068 vatype = targetm.fn_abi_va_list (cfun->decl);
5070 if (TREE_CODE (vatype) == ARRAY_TYPE)
5072 if (TREE_SIDE_EFFECTS (valist))
5073 valist = save_expr (valist);
5075 /* For this case, the backends will be expecting a pointer to
5076 vatype, but it's possible we've actually been given an array
5077 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5078 So fix it. */
5079 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5081 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5082 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5085 else
5087 tree pt = build_pointer_type (vatype);
5089 if (! needs_lvalue)
5091 if (! TREE_SIDE_EFFECTS (valist))
5092 return valist;
5094 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5095 TREE_SIDE_EFFECTS (valist) = 1;
5098 if (TREE_SIDE_EFFECTS (valist))
5099 valist = save_expr (valist);
5100 valist = fold_build2_loc (loc, MEM_REF,
5101 vatype, valist, build_int_cst (pt, 0));
5104 return valist;
5107 /* The "standard" definition of va_list is void*. */
5109 tree
5110 std_build_builtin_va_list (void)
5112 return ptr_type_node;
5115 /* The "standard" abi va_list is va_list_type_node. */
5117 tree
5118 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5120 return va_list_type_node;
5123 /* The "standard" type of va_list is va_list_type_node. */
5125 tree
5126 std_canonical_va_list_type (tree type)
5128 tree wtype, htype;
5130 wtype = va_list_type_node;
5131 htype = type;
5133 if (TREE_CODE (wtype) == ARRAY_TYPE)
5135 /* If va_list is an array type, the argument may have decayed
5136 to a pointer type, e.g. by being passed to another function.
5137 In that case, unwrap both types so that we can compare the
5138 underlying records. */
5139 if (TREE_CODE (htype) == ARRAY_TYPE
5140 || POINTER_TYPE_P (htype))
5142 wtype = TREE_TYPE (wtype);
5143 htype = TREE_TYPE (htype);
5146 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5147 return va_list_type_node;
5149 return NULL_TREE;
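/* For illustration: on targets where va_list is a one-element array
   type (as on x86-64), a va_list passed to another function decays to
   a pointer to its element type; the unwrapping above lets both the
   array form and the decayed pointer form match the canonical type. */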
5152 /* The "standard" implementation of va_start: just assign `nextarg' to
5153 the variable. */
5155 void
5156 std_expand_builtin_va_start (tree valist, rtx nextarg)
5158 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5159 convert_move (va_r, nextarg, 0);
5162 /* Expand EXP, a call to __builtin_va_start. */
5164 static rtx
5165 expand_builtin_va_start (tree exp)
5167 rtx nextarg;
5168 tree valist;
5169 location_t loc = EXPR_LOCATION (exp);
5171 if (call_expr_nargs (exp) < 2)
5173 error_at (loc, "too few arguments to function %<va_start%>");
5174 return const0_rtx;
5177 if (fold_builtin_next_arg (exp, true))
5178 return const0_rtx;
5180 nextarg = expand_builtin_next_arg ();
5181 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5183 if (targetm.expand_builtin_va_start)
5184 targetm.expand_builtin_va_start (valist, nextarg);
5185 else
5186 std_expand_builtin_va_start (valist, nextarg);
5188 return const0_rtx;
5191 /* Expand EXP, a call to __builtin_va_end. */
5193 static rtx
5194 expand_builtin_va_end (tree exp)
5196 tree valist = CALL_EXPR_ARG (exp, 0);
5198 /* Evaluate for side effects, if needed. I hate macros that don't
5199 do that. */
5200 if (TREE_SIDE_EFFECTS (valist))
5201 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5203 return const0_rtx;
5206 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5207 builtin rather than just as an assignment in stdarg.h because of the
5208 nastiness of array-type va_list types. */
5210 static rtx
5211 expand_builtin_va_copy (tree exp)
5213 tree dst, src, t;
5214 location_t loc = EXPR_LOCATION (exp);
5216 dst = CALL_EXPR_ARG (exp, 0);
5217 src = CALL_EXPR_ARG (exp, 1);
5219 dst = stabilize_va_list_loc (loc, dst, 1);
5220 src = stabilize_va_list_loc (loc, src, 0);
5222 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5224 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5226 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5227 TREE_SIDE_EFFECTS (t) = 1;
5228 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5230 else
5232 rtx dstb, srcb, size;
5234 /* Evaluate to pointers. */
5235 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5236 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5237 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5238 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5240 dstb = convert_memory_address (Pmode, dstb);
5241 srcb = convert_memory_address (Pmode, srcb);
5243 /* "Dereference" to BLKmode memories. */
5244 dstb = gen_rtx_MEM (BLKmode, dstb);
5245 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5246 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5247 srcb = gen_rtx_MEM (BLKmode, srcb);
5248 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5249 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5251 /* Copy. */
5252 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5255 return const0_rtx;
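/* Illustrative sketch (not from the GCC sources): on targets where
   va_list is an array type, e.g. the x86-64 SysV ABI's
   `typedef struct __va_list_tag va_list[1];', a plain assignment
   `*dst = *src' cannot be used, which is why the branch above falls
   back to emit_block_move over the whole record:

     void
     forward_args (int n, ...)
     {
       va_list ap, aq;
       va_start (ap, n);
       va_copy (aq, ap);    // expands via the block-move path here
       consume (aq);        // hypothetical helper
       va_end (aq);
       va_end (ap);
     }
*/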
5258 /* Expand a call to one of the builtin functions __builtin_frame_address or
5259 __builtin_return_address. */
5261 static rtx
5262 expand_builtin_frame_address (tree fndecl, tree exp)
5264 /* The argument must be a nonnegative integer constant.
5265 It counts the number of frames to scan up the stack.
5266 The value is either the frame pointer value or the return
5267 address saved in that frame. */
5268 if (call_expr_nargs (exp) == 0)
5269 /* Warning about missing arg was already issued. */
5270 return const0_rtx;
5271 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5273 error ("invalid argument to %qD", fndecl);
5274 return const0_rtx;
5276 else
5278 /* Number of frames to scan up the stack. */
5279 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5281 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5283 /* Some ports cannot access arbitrary stack frames. */
5284 if (tem == NULL)
5286 warning (0, "unsupported argument to %qD", fndecl);
5287 return const0_rtx;
5290 if (count)
5292 /* Warn since no effort is made to ensure that any frame
5293 beyond the current one exists or can be safely reached. */
5294 warning (OPT_Wframe_address, "calling %qD with "
5295 "a nonzero argument is unsafe", fndecl);
5298 /* For __builtin_frame_address, return what we've got. */
5299 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5300 return tem;
5302 if (!REG_P (tem)
5303 && ! CONSTANT_P (tem))
5304 tem = copy_addr_to_reg (tem);
5305 return tem;
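/* Illustrative usage (not part of the GCC sources): only a constant
   argument of 0 is reliably safe, which is what the warning above
   points out:

     void *fp = __builtin_frame_address (0);    // current frame
     void *ra = __builtin_return_address (0);   // caller's return address
     void *up = __builtin_frame_address (2);    // warns: -Wframe-address
*/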
5309 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5310 failed and the caller should emit a normal call. */
5312 static rtx
5313 expand_builtin_alloca (tree exp)
5315 rtx op0;
5316 rtx result;
5317 unsigned int align;
5318 tree fndecl = get_callee_fndecl (exp);
5319 HOST_WIDE_INT max_size;
5320 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5321 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5322 bool valid_arglist
5323 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5324 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5325 VOID_TYPE)
5326 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5327 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5328 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5330 if (!valid_arglist)
5331 return NULL_RTX;
5333 if ((alloca_for_var
5334 && warn_vla_limit >= HOST_WIDE_INT_MAX
5335 && warn_alloc_size_limit < warn_vla_limit)
5336 || (!alloca_for_var
5337 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5338 && warn_alloc_size_limit < warn_alloca_limit
5341 /* -Walloca-larger-than and -Wvla-larger-than settings of
5342 less than HOST_WIDE_INT_MAX override the more general
5343 -Walloc-size-larger-than so unless either of the former
5344 options is smaller than the last one (which would imply
5345 that the call was already checked), check the alloca
5346 arguments for overflow. */
5347 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5348 int idx[] = { 0, -1 };
5349 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5352 /* Compute the argument. */
5353 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5355 /* Compute the alignment. */
5356 align = (fcode == BUILT_IN_ALLOCA
5357 ? BIGGEST_ALIGNMENT
5358 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5360 /* Compute the maximum size. */
5361 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5362 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5363 : -1);
5365 /* Allocate the desired space. If the allocation stems from the declaration
5366 of a variable-sized object, it cannot accumulate. */
5367 result
5368 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5369 result = convert_memory_address (ptr_mode, result);
5371 return result;
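/* Illustrative usage (a sketch, not from the GCC sources): the three
   entry points differ only in their trailing constant arguments, with
   the alignment given in bits and the maximum size in bytes:

     p = __builtin_alloca (n);                          // BIGGEST_ALIGNMENT
     p = __builtin_alloca_with_align (n, 128);          // 16-byte aligned
     p = __builtin_alloca_with_align_and_max (n, 128, 4096);
*/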
5374 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5375 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which
5376 is the STACK_DYNAMIC_OFFSET value. See the motivation for this in the
5377 comment to the handle_builtin_stack_restore function. */
5379 static rtx
5380 expand_asan_emit_allocas_unpoison (tree exp)
5382 tree arg0 = CALL_EXPR_ARG (exp, 0);
5383 tree arg1 = CALL_EXPR_ARG (exp, 1);
5384 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5385 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5386 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5387 stack_pointer_rtx, NULL_RTX, 0,
5388 OPTAB_LIB_WIDEN);
5389 off = convert_modes (ptr_mode, Pmode, off, 0);
5390 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5391 OPTAB_LIB_WIDEN);
5392 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5393 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5394 top, ptr_mode, bot, ptr_mode);
5395 return ret;
5398 /* Expand a call to bswap builtin in EXP.
5399 Return NULL_RTX if a normal call should be emitted rather than expanding the
5400 function in-line. If convenient, the result should be placed in TARGET.
5401 SUBTARGET may be used as the target for computing one of EXP's operands. */
5403 static rtx
5404 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5405 rtx subtarget)
5407 tree arg;
5408 rtx op0;
5410 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5411 return NULL_RTX;
5413 arg = CALL_EXPR_ARG (exp, 0);
5414 op0 = expand_expr (arg,
5415 subtarget && GET_MODE (subtarget) == target_mode
5416 ? subtarget : NULL_RTX,
5417 target_mode, EXPAND_NORMAL);
5418 if (GET_MODE (op0) != target_mode)
5419 op0 = convert_to_mode (target_mode, op0, 1);
5421 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5423 gcc_assert (target);
5425 return convert_to_mode (target_mode, target, 1);
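/* Illustrative example (not from the GCC sources): bswap reverses the
   byte order of its operand, so independent of target endianness

     __builtin_bswap16 (0xaabb)     == 0xbbaa
     __builtin_bswap32 (0x11223344) == 0x44332211

   and the expansion above merely routes the argument through
   bswap_optab in the requested mode.  */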
5428 /* Expand a call to a unary builtin in EXP.
5429 Return NULL_RTX if a normal call should be emitted rather than expanding the
5430 function in-line. If convenient, the result should be placed in TARGET.
5431 SUBTARGET may be used as the target for computing one of EXP's operands. */
5433 static rtx
5434 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5435 rtx subtarget, optab op_optab)
5437 rtx op0;
5439 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5440 return NULL_RTX;
5442 /* Compute the argument. */
5443 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5444 (subtarget
5445 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5446 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5447 VOIDmode, EXPAND_NORMAL);
5448 /* Compute op, into TARGET if possible.
5449 Set TARGET to wherever the result comes back. */
5450 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5451 op_optab, op0, target, op_optab != clrsb_optab);
5452 gcc_assert (target);
5454 return convert_to_mode (target_mode, target, 0);
5457 /* Expand a call to __builtin_expect. We just return our argument,
5458 as the builtin_expect semantics should have already been applied by
5459 the tree branch prediction pass. */
5461 static rtx
5462 expand_builtin_expect (tree exp, rtx target)
5464 tree arg;
5466 if (call_expr_nargs (exp) < 2)
5467 return const0_rtx;
5468 arg = CALL_EXPR_ARG (exp, 0);
5470 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5471 /* When guessing was done, the hints should be already stripped away. */
5472 gcc_assert (!flag_guess_branch_prob
5473 || optimize == 0 || seen_error ());
5474 return target;
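/* Illustrative usage (not part of the GCC sources): by the time this
   expander runs, the hint has already been consumed by the branch
   prediction pass, so

     if (__builtin_expect (err != 0, 0))   // "err is almost always 0"
       handle_error (err);                 // hypothetical handler

   simply evaluates the first argument here.  */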
5477 /* Expand a call to __builtin_expect_with_probability. We just return our
5478 argument, as the builtin_expect semantics should have already been applied
5479 by the tree branch prediction pass. */
5481 static rtx
5482 expand_builtin_expect_with_probability (tree exp, rtx target)
5484 tree arg;
5486 if (call_expr_nargs (exp) < 3)
5487 return const0_rtx;
5488 arg = CALL_EXPR_ARG (exp, 0);
5490 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5491 /* When guessing was done, the hints should be already stripped away. */
5492 gcc_assert (!flag_guess_branch_prob
5493 || optimize == 0 || seen_error ());
5494 return target;
5498 /* Expand a call to __builtin_assume_aligned. We just return our first
5499 argument, as the builtin_assume_aligned semantics should have already
5500 been applied by CCP. */
5502 static rtx
5503 expand_builtin_assume_aligned (tree exp, rtx target)
5505 if (call_expr_nargs (exp) < 2)
5506 return const0_rtx;
5507 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5508 EXPAND_NORMAL);
5509 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5510 && (call_expr_nargs (exp) < 3
5511 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5512 return target;
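/* Illustrative usage (not from the GCC sources): the alignment fact is
   recorded and exploited by CCP well before expansion, e.g.

     double *p = __builtin_assume_aligned (buf, 32);  // buf is 32-byte aligned

   so only the first argument still matters at this point.  */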
5515 void
5516 expand_builtin_trap (void)
5518 if (targetm.have_trap ())
5520 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5521 /* For trap insns when not accumulating outgoing args force
5522 REG_ARGS_SIZE note to prevent crossjumping of calls with
5523 different args sizes. */
5524 if (!ACCUMULATE_OUTGOING_ARGS)
5525 add_args_size_note (insn, stack_pointer_delta);
5527 else
5529 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5530 tree call_expr = build_call_expr (fn, 0);
5531 expand_call (call_expr, NULL_RTX, false);
5534 emit_barrier ();
5537 /* Expand a call to __builtin_unreachable. We do nothing except emit
5538 a barrier saying that control flow will not pass here.
5540 It is the responsibility of the program being compiled to ensure
5541 that control flow never reaches __builtin_unreachable. */
5542 static void
5543 expand_builtin_unreachable (void)
5545 emit_barrier ();
5548 /* Expand EXP, a call to fabs, fabsf or fabsl.
5549 Return NULL_RTX if a normal call should be emitted rather than expanding
5550 the function inline. If convenient, the result should be placed
5551 in TARGET. SUBTARGET may be used as the target for computing
5552 the operand. */
5554 static rtx
5555 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5557 machine_mode mode;
5558 tree arg;
5559 rtx op0;
5561 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5562 return NULL_RTX;
5564 arg = CALL_EXPR_ARG (exp, 0);
5565 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5566 mode = TYPE_MODE (TREE_TYPE (arg));
5567 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5568 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5571 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5572 Return NULL if a normal call should be emitted rather than expanding the
5573 function inline. If convenient, the result should be placed in TARGET.
5574 SUBTARGET may be used as the target for computing the operand. */
5576 static rtx
5577 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5579 rtx op0, op1;
5580 tree arg;
5582 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5583 return NULL_RTX;
5585 arg = CALL_EXPR_ARG (exp, 0);
5586 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5588 arg = CALL_EXPR_ARG (exp, 1);
5589 op1 = expand_normal (arg);
5591 return expand_copysign (op0, op1, target);
5594 /* Expand a call to __builtin___clear_cache. */
5596 static rtx
5597 expand_builtin___clear_cache (tree exp)
5599 if (!targetm.code_for_clear_cache)
5601 #ifdef CLEAR_INSN_CACHE
5602 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5603 does something. Just do the default expansion to a call to
5604 __clear_cache(). */
5605 return NULL_RTX;
5606 #else
5607 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5608 does nothing. There is no need to call it. Do nothing. */
5609 return const0_rtx;
5610 #endif /* CLEAR_INSN_CACHE */
5613 /* We have a "clear_cache" insn, and it will handle everything. */
5614 tree begin, end;
5615 rtx begin_rtx, end_rtx;
5617 /* We must not expand to a library call. If we did, any
5618 fallback library function in libgcc that might contain a call to
5619 __builtin___clear_cache() would recurse infinitely. */
5620 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5622 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5623 return const0_rtx;
5626 if (targetm.have_clear_cache ())
5628 struct expand_operand ops[2];
5630 begin = CALL_EXPR_ARG (exp, 0);
5631 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5633 end = CALL_EXPR_ARG (exp, 1);
5634 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5636 create_address_operand (&ops[0], begin_rtx);
5637 create_address_operand (&ops[1], end_rtx);
5638 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5639 return const0_rtx;
5641 return const0_rtx;
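/* Illustrative usage (not part of the GCC sources): JIT-style code
   typically calls

     __builtin___clear_cache (code, code + size);

   after writing instructions; depending on the target this becomes a
   clear_cache insn, a call to libgcc's __clear_cache, or nothing.  */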
5644 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5646 static rtx
5647 round_trampoline_addr (rtx tramp)
5649 rtx temp, addend, mask;
5651 /* If we don't need too much alignment, we'll have been guaranteed
5652 proper alignment by get_trampoline_type. */
5653 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5654 return tramp;
5656 /* Round address up to desired boundary. */
5657 temp = gen_reg_rtx (Pmode);
5658 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5659 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5661 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5662 temp, 0, OPTAB_LIB_WIDEN);
5663 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5664 temp, 0, OPTAB_LIB_WIDEN);
5666 return tramp;
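/* Worked example (illustrative only): with a TRAMPOLINE_ALIGNMENT of
   64 bits the code above computes (tramp + 7) & -8, so an address of
   0x1003 is rounded up to (0x1003 + 7) & ~7 = 0x1008, the next
   8-byte boundary.  */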
5669 static rtx
5670 expand_builtin_init_trampoline (tree exp, bool onstack)
5672 tree t_tramp, t_func, t_chain;
5673 rtx m_tramp, r_tramp, r_chain, tmp;
5675 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5676 POINTER_TYPE, VOID_TYPE))
5677 return NULL_RTX;
5679 t_tramp = CALL_EXPR_ARG (exp, 0);
5680 t_func = CALL_EXPR_ARG (exp, 1);
5681 t_chain = CALL_EXPR_ARG (exp, 2);
5683 r_tramp = expand_normal (t_tramp);
5684 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5685 MEM_NOTRAP_P (m_tramp) = 1;
5687 /* If ONSTACK, the TRAMP argument should be the address of a field
5688 within the local function's FRAME decl. Either way, let's see if
5689 we can fill in the MEM_ATTRs for this memory. */
5690 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5691 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5693 /* The creator of a heap trampoline is responsible for making sure the
5694 address is aligned to at least STACK_BOUNDARY. Normally malloc
5695 will ensure this anyhow. */
5696 tmp = round_trampoline_addr (r_tramp);
5697 if (tmp != r_tramp)
5699 m_tramp = change_address (m_tramp, BLKmode, tmp);
5700 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5701 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5704 /* The FUNC argument should be the address of the nested function.
5705 Extract the actual function decl to pass to the hook. */
5706 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5707 t_func = TREE_OPERAND (t_func, 0);
5708 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5710 r_chain = expand_normal (t_chain);
5712 /* Generate insns to initialize the trampoline. */
5713 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5715 if (onstack)
5717 trampolines_created = 1;
5719 if (targetm.calls.custom_function_descriptors != 0)
5720 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5721 "trampoline generated for nested function %qD", t_func);
5724 return const0_rtx;
5727 static rtx
5728 expand_builtin_adjust_trampoline (tree exp)
5730 rtx tramp;
5732 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5733 return NULL_RTX;
5735 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5736 tramp = round_trampoline_addr (tramp);
5737 if (targetm.calls.trampoline_adjust_address)
5738 tramp = targetm.calls.trampoline_adjust_address (tramp);
5740 return tramp;
5743 /* Expand a call to the builtin descriptor initialization routine.
5744 A descriptor is made up of a pair of pointers: to the static
5745 chain and to the code entry, in that order. */
5747 static rtx
5748 expand_builtin_init_descriptor (tree exp)
5750 tree t_descr, t_func, t_chain;
5751 rtx m_descr, r_descr, r_func, r_chain;
5753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5754 VOID_TYPE))
5755 return NULL_RTX;
5757 t_descr = CALL_EXPR_ARG (exp, 0);
5758 t_func = CALL_EXPR_ARG (exp, 1);
5759 t_chain = CALL_EXPR_ARG (exp, 2);
5761 r_descr = expand_normal (t_descr);
5762 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5763 MEM_NOTRAP_P (m_descr) = 1;
5765 r_func = expand_normal (t_func);
5766 r_chain = expand_normal (t_chain);
5768 /* Generate insns to initialize the descriptor. */
5769 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5770 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5771 POINTER_SIZE / BITS_PER_UNIT), r_func);
5773 return const0_rtx;
5776 /* Expand a call to the builtin descriptor adjustment routine. */
5778 static rtx
5779 expand_builtin_adjust_descriptor (tree exp)
5781 rtx tramp;
5783 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5784 return NULL_RTX;
5786 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5788 /* Unalign the descriptor to allow runtime identification. */
5789 tramp = plus_constant (ptr_mode, tramp,
5790 targetm.calls.custom_function_descriptors);
5792 return force_operand (tramp, NULL_RTX);
5795 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5796 function. The function first checks whether the back end provides
5797 an insn to implement signbit for the respective mode. If not, it
5798 checks whether the floating point format of the value is such that
5799 the sign bit can be extracted. If that is not the case, error out.
5800 EXP is the expression that is a call to the builtin function; if
5801 convenient, the result should be placed in TARGET. */
5802 static rtx
5803 expand_builtin_signbit (tree exp, rtx target)
5805 const struct real_format *fmt;
5806 scalar_float_mode fmode;
5807 scalar_int_mode rmode, imode;
5808 tree arg;
5809 int word, bitpos;
5810 enum insn_code icode;
5811 rtx temp;
5812 location_t loc = EXPR_LOCATION (exp);
5814 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5815 return NULL_RTX;
5817 arg = CALL_EXPR_ARG (exp, 0);
5818 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5819 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5820 fmt = REAL_MODE_FORMAT (fmode);
5822 arg = builtin_save_expr (arg);
5824 /* Expand the argument yielding a RTX expression. */
5825 temp = expand_normal (arg);
5827 /* Check if the back end provides an insn that handles signbit for the
5828 argument's mode. */
5829 icode = optab_handler (signbit_optab, fmode);
5830 if (icode != CODE_FOR_nothing)
5832 rtx_insn *last = get_last_insn ();
5833 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5834 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5835 return target;
5836 delete_insns_since (last);
5839 /* For floating point formats without a sign bit, implement signbit
5840 as "ARG < 0.0". */
5841 bitpos = fmt->signbit_ro;
5842 if (bitpos < 0)
5844 /* But we can't do this if the format supports signed zero. */
5845 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5847 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5848 build_real (TREE_TYPE (arg), dconst0));
5849 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5852 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5854 imode = int_mode_for_mode (fmode).require ();
5855 temp = gen_lowpart (imode, temp);
5857 else
5859 imode = word_mode;
5860 /* Handle targets with different FP word orders. */
5861 if (FLOAT_WORDS_BIG_ENDIAN)
5862 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5863 else
5864 word = bitpos / BITS_PER_WORD;
5865 temp = operand_subword_force (temp, word, fmode);
5866 bitpos = bitpos % BITS_PER_WORD;
5869 /* Force the intermediate word_mode (or narrower) result into a
5870 register. This avoids attempting to create paradoxical SUBREGs
5871 of floating point modes below. */
5872 temp = force_reg (imode, temp);
5874 /* If the bitpos is within the "result mode" lowpart, the operation
5875 can be implemented with a single bitwise AND. Otherwise, we need
5876 a right shift and an AND. */
5878 if (bitpos < GET_MODE_BITSIZE (rmode))
5880 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5882 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5883 temp = gen_lowpart (rmode, temp);
5884 temp = expand_binop (rmode, and_optab, temp,
5885 immed_wide_int_const (mask, rmode),
5886 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5888 else
5890 /* Perform a logical right shift to place the signbit in the least
5891 significant bit, then truncate the result to the desired mode
5892 and mask just this bit. */
5893 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5894 temp = gen_lowpart (rmode, temp);
5895 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5896 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5899 return temp;
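/* Worked example (illustrative only): for IEEE binary32 with a 32-bit
   result mode, fmt->signbit_ro is 31, which fits in the lowpart, so
   the expansion is a single mask:

     signbit (x)  ~~>  int_view_of_x & 0x80000000   // nonzero if negative

   while a sign bit beyond the result mode would take the
   shift-and-AND path above instead.  */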
5902 /* Expand fork or exec calls. TARGET is the desired target of the
5903 call. EXP is the call. FN is the
5904 identifier of the actual function. IGNORE is nonzero if the
5905 value is to be ignored. */
5907 static rtx
5908 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5910 tree id, decl;
5911 tree call;
5913 /* If we are not profiling, just call the function. */
5914 if (!profile_arc_flag)
5915 return NULL_RTX;
5917 /* Otherwise call the wrapper. This should be equivalent for the rest of
5918 compiler, so the code does not diverge, and the wrapper may run the
5919 code necessary for keeping the profiling sane. */
5921 switch (DECL_FUNCTION_CODE (fn))
5923 case BUILT_IN_FORK:
5924 id = get_identifier ("__gcov_fork");
5925 break;
5927 case BUILT_IN_EXECL:
5928 id = get_identifier ("__gcov_execl");
5929 break;
5931 case BUILT_IN_EXECV:
5932 id = get_identifier ("__gcov_execv");
5933 break;
5935 case BUILT_IN_EXECLP:
5936 id = get_identifier ("__gcov_execlp");
5937 break;
5939 case BUILT_IN_EXECLE:
5940 id = get_identifier ("__gcov_execle");
5941 break;
5943 case BUILT_IN_EXECVP:
5944 id = get_identifier ("__gcov_execvp");
5945 break;
5947 case BUILT_IN_EXECVE:
5948 id = get_identifier ("__gcov_execve");
5949 break;
5951 default:
5952 gcc_unreachable ();
5955 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5956 FUNCTION_DECL, id, TREE_TYPE (fn));
5957 DECL_EXTERNAL (decl) = 1;
5958 TREE_PUBLIC (decl) = 1;
5959 DECL_ARTIFICIAL (decl) = 1;
5960 TREE_NOTHROW (decl) = 1;
5961 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5962 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5963 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5964 return expand_call (call, target, ignore);
5969 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5970 the pointer in these functions is void*, the tree optimizers may remove
5971 casts. The mode computed in expand_builtin isn't reliable either, due
5972 to __sync_bool_compare_and_swap.
5974 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5975 group of builtins. This gives us log2 of the mode size. */
5977 static inline machine_mode
5978 get_builtin_sync_mode (int fcode_diff)
5980 /* The size is not negotiable, so ask not to get BLKmode in return
5981 if the target indicates that a smaller size would be better. */
5982 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
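/* Illustrative mapping (not from the GCC sources): FCODE_DIFF is the
   log2 of the access size in bytes, e.g.

     __sync_fetch_and_add_1   diff 0   8-bit integer mode (QImode)
     __sync_fetch_and_add_4   diff 2   32-bit integer mode (SImode)
     __sync_fetch_and_add_16  diff 4   128-bit integer mode (TImode)
*/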
5985 /* Expand the memory expression LOC and return the appropriate memory operand
5986 for the builtin_sync operations. */
5988 static rtx
5989 get_builtin_sync_mem (tree loc, machine_mode mode)
5991 rtx addr, mem;
5992 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5993 ? TREE_TYPE (TREE_TYPE (loc))
5994 : TREE_TYPE (loc));
5995 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5997 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5998 addr = convert_memory_address (addr_mode, addr);
6000 /* Note that we explicitly do not want any alias information for this
6001 memory, so that we kill all other live memories. Otherwise we don't
6002 satisfy the full barrier semantics of the intrinsic. */
6003 mem = gen_rtx_MEM (mode, addr);
6005 set_mem_addr_space (mem, addr_space);
6007 mem = validize_mem (mem);
6009 /* The alignment needs to be at least that of the mode. */
6010 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6011 get_pointer_alignment (loc)));
6012 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6013 MEM_VOLATILE_P (mem) = 1;
6015 return mem;
6018 /* Make sure an argument is in the right mode.
6019 EXP is the tree argument.
6020 MODE is the mode it should be in. */
6022 static rtx
6023 expand_expr_force_mode (tree exp, machine_mode mode)
6025 rtx val;
6026 machine_mode old_mode;
6028 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6029 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6030 of CONST_INTs, where we know the old_mode only from the call argument. */
6032 old_mode = GET_MODE (val);
6033 if (old_mode == VOIDmode)
6034 old_mode = TYPE_MODE (TREE_TYPE (exp));
6035 val = convert_modes (mode, old_mode, val, 1);
6036 return val;
6040 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6041 EXP is the CALL_EXPR. CODE is the rtx code
6042 that corresponds to the arithmetic or logical operation from the name;
6043 an exception here is that NOT actually means NAND. TARGET is an optional
6044 place for us to store the results; AFTER is true if this is the
6045 fetch_and_xxx form. */
6047 static rtx
6048 expand_builtin_sync_operation (machine_mode mode, tree exp,
6049 enum rtx_code code, bool after,
6050 rtx target)
6052 rtx val, mem;
6053 location_t loc = EXPR_LOCATION (exp);
6055 if (code == NOT && warn_sync_nand)
6057 tree fndecl = get_callee_fndecl (exp);
6058 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6060 static bool warned_f_a_n, warned_n_a_f;
6062 switch (fcode)
6064 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6065 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6066 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6067 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6068 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6069 if (warned_f_a_n)
6070 break;
6072 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6073 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6074 warned_f_a_n = true;
6075 break;
6077 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6078 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6079 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6080 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6081 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6082 if (warned_n_a_f)
6083 break;
6085 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6086 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6087 warned_n_a_f = true;
6088 break;
6090 default:
6091 gcc_unreachable ();
6095 /* Expand the operands. */
6096 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6097 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6099 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6100 after);
6103 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6104 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6105 true if this is the boolean form. TARGET is a place for us to store the
6106 results; this is NOT optional if IS_BOOL is true. */
6108 static rtx
6109 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6110 bool is_bool, rtx target)
6112 rtx old_val, new_val, mem;
6113 rtx *pbool, *poval;
6115 /* Expand the operands. */
6116 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6117 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6118 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6120 pbool = poval = NULL;
6121 if (target != const0_rtx)
6123 if (is_bool)
6124 pbool = &target;
6125 else
6126 poval = &target;
6128 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6129 false, MEMMODEL_SYNC_SEQ_CST,
6130 MEMMODEL_SYNC_SEQ_CST))
6131 return NULL_RTX;
6133 return target;
6136 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6137 general form is actually an atomic exchange, and some targets only
6138 support a reduced form with the second argument being a constant 1.
6139 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6140 the results. */
6142 static rtx
6143 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6144 rtx target)
6146 rtx val, mem;
6148 /* Expand the operands. */
6149 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6150 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6152 return expand_sync_lock_test_and_set (target, mem, val);
6155 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6157 static void
6158 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6160 rtx mem;
6162 /* Expand the operands. */
6163 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6165 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6168 /* Given an integer representing an ``enum memmodel'', verify its
6169 correctness and return the memory model enum. */
6171 static enum memmodel
6172 get_memmodel (tree exp)
6174 rtx op;
6175 unsigned HOST_WIDE_INT val;
6176 location_t loc
6177 = expansion_point_location_if_in_system_header (input_location);
6179 /* If the parameter is not a constant, it's a run time value so we'll just
6180 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6181 if (TREE_CODE (exp) != INTEGER_CST)
6182 return MEMMODEL_SEQ_CST;
6184 op = expand_normal (exp);
6186 val = INTVAL (op);
6187 if (targetm.memmodel_check)
6188 val = targetm.memmodel_check (val);
6189 else if (val & ~MEMMODEL_MASK)
6191 warning_at (loc, OPT_Winvalid_memory_model,
6192 "unknown architecture specifier in memory model to builtin");
6193 return MEMMODEL_SEQ_CST;
6196 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
6197 if (memmodel_base (val) >= MEMMODEL_LAST)
6199 warning_at (loc, OPT_Winvalid_memory_model,
6200 "invalid memory model argument to builtin");
6201 return MEMMODEL_SEQ_CST;
6204 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6205 be conservative and promote consume to acquire. */
6206 if (val == MEMMODEL_CONSUME)
6207 val = MEMMODEL_ACQUIRE;
6209 return (enum memmodel) val;
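/* Illustrative example (not part of the GCC sources): a call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   arrives here with an INTEGER_CST model and, per the workaround
   above, is treated as __ATOMIC_ACQUIRE; a non-constant model is
   conservatively treated as MEMMODEL_SEQ_CST.  */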
6212 /* Expand the __atomic_exchange intrinsic:
6213 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6214 EXP is the CALL_EXPR.
6215 TARGET is an optional place for us to store the results. */
6217 static rtx
6218 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6220 rtx val, mem;
6221 enum memmodel model;
6223 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6225 if (!flag_inline_atomics)
6226 return NULL_RTX;
6228 /* Expand the operands. */
6229 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6230 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6232 return expand_atomic_exchange (target, mem, val, model);
6235 /* Expand the __atomic_compare_exchange intrinsic:
6236 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6237 TYPE desired, BOOL weak,
6238 enum memmodel success,
6239 enum memmodel failure)
6240 EXP is the CALL_EXPR.
6241 TARGET is an optional place for us to store the results. */
6243 static rtx
6244 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6245 rtx target)
6247 rtx expect, desired, mem, oldval;
6248 rtx_code_label *label;
6249 enum memmodel success, failure;
6250 tree weak;
6251 bool is_weak;
6252 location_t loc
6253 = expansion_point_location_if_in_system_header (input_location);
6255 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6256 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6258 if (failure > success)
6260 warning_at (loc, OPT_Winvalid_memory_model,
6261 "failure memory model cannot be stronger than success "
6262 "memory model for %<__atomic_compare_exchange%>");
6263 success = MEMMODEL_SEQ_CST;
6266 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6268 warning_at (loc, OPT_Winvalid_memory_model,
6269 "invalid failure memory model for "
6270 "%<__atomic_compare_exchange%>");
6271 failure = MEMMODEL_SEQ_CST;
6272 success = MEMMODEL_SEQ_CST;
6276 if (!flag_inline_atomics)
6277 return NULL_RTX;
6279 /* Expand the operands. */
6280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6282 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6283 expect = convert_memory_address (Pmode, expect);
6284 expect = gen_rtx_MEM (mode, expect);
6285 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6287 weak = CALL_EXPR_ARG (exp, 3);
6288 is_weak = false;
6289 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6290 is_weak = true;
6292 if (target == const0_rtx)
6293 target = NULL;
6295 /* Lest the rtl backend create a race condition with an improper store
6296 to memory, always create a new pseudo for OLDVAL. */
6297 oldval = NULL;
6299 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6300 is_weak, success, failure))
6301 return NULL_RTX;
6303 /* Conditionally store back to EXPECT, lest we create a race condition
6304 with an improper store to memory. */
6305 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6306 the normal case where EXPECT is totally private, i.e. a register. At
6307 which point the store can be unconditional. */
6308 label = gen_label_rtx ();
6309 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6310 GET_MODE (target), 1, label);
6311 emit_move_insn (expect, oldval);
6312 emit_label (label);
6314 return target;
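/* Illustrative usage (not from the GCC sources): the conditional
   store-back above implements the C11 rule that a failed compare
   updates the EXPECT slot with the value actually observed:

     int expected = 0;
     _Bool ok = __atomic_compare_exchange_n (&x, &expected, 1, 0,
                                             __ATOMIC_SEQ_CST,
                                             __ATOMIC_SEQ_CST);
     // if !ok, `expected' now holds the value that was in x
*/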
6317 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6318 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6319 call. The weak parameter must be dropped to match the expected parameter
6320 list and the expected argument changed from value to pointer to memory
6321 slot. */
6323 static void
6324 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6326 unsigned int z;
6327 vec<tree, va_gc> *vec;
6329 vec_alloc (vec, 5);
6330 vec->quick_push (gimple_call_arg (call, 0));
6331 tree expected = gimple_call_arg (call, 1);
6332 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6333 TREE_TYPE (expected));
6334 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6335 if (expd != x)
6336 emit_move_insn (x, expd);
6337 tree v = make_tree (TREE_TYPE (expected), x);
6338 vec->quick_push (build1 (ADDR_EXPR,
6339 build_pointer_type (TREE_TYPE (expected)), v));
6340 vec->quick_push (gimple_call_arg (call, 2));
6341 /* Skip the boolean weak parameter. */
6342 for (z = 4; z < 6; z++)
6343 vec->quick_push (gimple_call_arg (call, z));
6344 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6345 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6346 gcc_assert (bytes_log2 < 5);
6347 built_in_function fncode
6348 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6349 + bytes_log2);
6350 tree fndecl = builtin_decl_explicit (fncode);
6351 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6352 fndecl);
6353 tree exp = build_call_vec (boolean_type_node, fn, vec);
6354 tree lhs = gimple_call_lhs (call);
6355 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6356 if (lhs)
6358 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6359 if (GET_MODE (boolret) != mode)
6360 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6361 x = force_reg (mode, x);
6362 write_complex_part (target, boolret, true);
6363 write_complex_part (target, x, false);
6367 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6369 void
6370 expand_ifn_atomic_compare_exchange (gcall *call)
6372 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6373 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6374 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6375 rtx expect, desired, mem, oldval, boolret;
6376 enum memmodel success, failure;
6377 tree lhs;
6378 bool is_weak;
6379 location_t loc
6380 = expansion_point_location_if_in_system_header (gimple_location (call));
6382 success = get_memmodel (gimple_call_arg (call, 4));
6383 failure = get_memmodel (gimple_call_arg (call, 5));
6385 if (failure > success)
6387 warning_at (loc, OPT_Winvalid_memory_model,
6388 "failure memory model cannot be stronger than success "
6389 "memory model for %<__atomic_compare_exchange%>");
6390 success = MEMMODEL_SEQ_CST;
6393 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6395 warning_at (loc, OPT_Winvalid_memory_model,
6396 "invalid failure memory model for "
6397 "%<__atomic_compare_exchange%>");
6398 failure = MEMMODEL_SEQ_CST;
6399 success = MEMMODEL_SEQ_CST;
6402 if (!flag_inline_atomics)
6404 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6405 return;
6408 /* Expand the operands. */
6409 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6411 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6412 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6414 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6416 boolret = NULL;
6417 oldval = NULL;
6419 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6420 is_weak, success, failure))
6422 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6423 return;
6426 lhs = gimple_call_lhs (call);
6427 if (lhs)
6429 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6430 if (GET_MODE (boolret) != mode)
6431 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6432 write_complex_part (target, boolret, true);
6433 write_complex_part (target, oldval, false);
6437 /* Expand the __atomic_load intrinsic:
6438 TYPE __atomic_load (TYPE *object, enum memmodel)
6439 EXP is the CALL_EXPR.
6440 TARGET is an optional place for us to store the results. */
6442 static rtx
6443 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6445 rtx mem;
6446 enum memmodel model;
6448 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6449 if (is_mm_release (model) || is_mm_acq_rel (model))
6451 location_t loc
6452 = expansion_point_location_if_in_system_header (input_location);
6453 warning_at (loc, OPT_Winvalid_memory_model,
6454 "invalid memory model for %<__atomic_load%>");
6455 model = MEMMODEL_SEQ_CST;
6458 if (!flag_inline_atomics)
6459 return NULL_RTX;
6461 /* Expand the operand. */
6462 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6464 return expand_atomic_load (target, mem, model);
6468 /* Expand the __atomic_store intrinsic:
6469 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6470 EXP is the CALL_EXPR.
6471 TARGET is an optional place for us to store the results. */
6473 static rtx
6474 expand_builtin_atomic_store (machine_mode mode, tree exp)
6476 rtx mem, val;
6477 enum memmodel model;
6479 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6480 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6481 || is_mm_release (model)))
6483 location_t loc
6484 = expansion_point_location_if_in_system_header (input_location);
6485 warning_at (loc, OPT_Winvalid_memory_model,
6486 "invalid memory model for %<__atomic_store%>");
6487 model = MEMMODEL_SEQ_CST;
6490 if (!flag_inline_atomics)
6491 return NULL_RTX;
6493 /* Expand the operands. */
6494 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6495 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6497 return expand_atomic_store (mem, val, model, false);
6500 /* Expand the __atomic_fetch_XXX intrinsic:
6501 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6502 EXP is the CALL_EXPR.
6503 TARGET is an optional place for us to store the results.
6504 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6505 FETCH_AFTER is true if returning the result of the operation.
6506 FETCH_AFTER is false if returning the value before the operation.
6507 IGNORE is true if the result is not used.
6508 EXT_CALL is the correct builtin for an external call if this cannot be
6509 resolved to an instruction sequence. */
6511 static rtx
6512 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6513 enum rtx_code code, bool fetch_after,
6514 bool ignore, enum built_in_function ext_call)
6516 rtx val, mem, ret;
6517 enum memmodel model;
6518 tree fndecl;
6519 tree addr;
6521 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6523 /* Expand the operands. */
6524 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6525 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6527 /* Only try generating instructions if inlining is turned on. */
6528 if (flag_inline_atomics)
6530 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6531 if (ret)
6532 return ret;
6535 /* Return if a different routine isn't needed for the library call. */
6536 if (ext_call == BUILT_IN_NONE)
6537 return NULL_RTX;
6539 /* Change the call to the specified function. */
6540 fndecl = get_callee_fndecl (exp);
6541 addr = CALL_EXPR_FN (exp);
6542 STRIP_NOPS (addr);
6544 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6545 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6547 /* If we will emit code after the call, the call cannot be a tail call.
6548 If it is emitted as a tail call, a barrier is emitted after it, and
6549 then all trailing code is removed. */
6550 if (!ignore)
6551 CALL_EXPR_TAILCALL (exp) = 0;
6553 /* Expand the call here so we can emit trailing code. */
6554 ret = expand_call (exp, target, ignore);
6556 /* Replace the original function just in case it matters. */
6557 TREE_OPERAND (addr, 0) = fndecl;
6559 /* Then issue the arithmetic correction to return the right result. */
6560 if (!ignore)
6562 if (code == NOT)
6564 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6565 OPTAB_LIB_WIDEN);
6566 ret = expand_simple_unop (mode, NOT, ret, target, true);
6568 else
6569 ret = expand_simple_binop (mode, code, ret, val, target, true,
6570 OPTAB_LIB_WIDEN);
6572 return ret;
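/* Worked example (illustrative only): if __atomic_add_fetch_4 cannot
   be inlined, the call is redirected to the library
   __atomic_fetch_add_4, which returns the old value; the correction
   above then computes ret + val, and for NAND the two-step sequence
   yields ~(ret & val), matching the fetch-after semantics.  */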
6575 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6577 void
6578 expand_ifn_atomic_bit_test_and (gcall *call)
6580 tree ptr = gimple_call_arg (call, 0);
6581 tree bit = gimple_call_arg (call, 1);
6582 tree flag = gimple_call_arg (call, 2);
6583 tree lhs = gimple_call_lhs (call);
6584 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6585 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6586 enum rtx_code code;
6587 optab optab;
6588 struct expand_operand ops[5];
6590 gcc_assert (flag_inline_atomics);
6592 if (gimple_call_num_args (call) == 4)
6593 model = get_memmodel (gimple_call_arg (call, 3));
6595 rtx mem = get_builtin_sync_mem (ptr, mode);
6596 rtx val = expand_expr_force_mode (bit, mode);
6598 switch (gimple_call_internal_fn (call))
6600 case IFN_ATOMIC_BIT_TEST_AND_SET:
6601 code = IOR;
6602 optab = atomic_bit_test_and_set_optab;
6603 break;
6604 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6605 code = XOR;
6606 optab = atomic_bit_test_and_complement_optab;
6607 break;
6608 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6609 code = AND;
6610 optab = atomic_bit_test_and_reset_optab;
6611 break;
6612 default:
6613 gcc_unreachable ();
6616 if (lhs == NULL_TREE)
6618 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6619 val, NULL_RTX, true, OPTAB_DIRECT);
6620 if (code == AND)
6621 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6622 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6623 return;
6626 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6627 enum insn_code icode = direct_optab_handler (optab, mode);
6628 gcc_assert (icode != CODE_FOR_nothing);
6629 create_output_operand (&ops[0], target, mode);
6630 create_fixed_operand (&ops[1], mem);
6631 create_convert_operand_to (&ops[2], val, mode, true);
6632 create_integer_operand (&ops[3], model);
6633 create_integer_operand (&ops[4], integer_onep (flag));
6634 if (maybe_expand_insn (icode, 5, ops))
6635 return;
6637 rtx bitval = val;
6638 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6639 val, NULL_RTX, true, OPTAB_DIRECT);
6640 rtx maskval = val;
6641 if (code == AND)
6642 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6643 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6644 code, model, false);
6645 if (integer_onep (flag))
6647 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6648 NULL_RTX, true, OPTAB_DIRECT);
6649 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6650 true, OPTAB_DIRECT);
6652 else
6653 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6654 OPTAB_DIRECT);
6655 if (result != target)
6656 emit_move_insn (target, result);
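/* Illustrative example (not part of the GCC sources): this internal
   function is created by matching source patterns such as

     unsigned mask = 1u << bit;
     old = __atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST) & mask;

   which, when atomic_bit_test_and_set_optab is available, becomes a
   single bit-test-and-set instruction instead of a generic fetch-or;
   the code above it is the fallback when the optab fails.  */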
6659 /* Expand an atomic clear operation.
6660 void _atomic_clear (BOOL *obj, enum memmodel)
6661 EXP is the call expression. */
6663 static rtx
6664 expand_builtin_atomic_clear (tree exp)
6666 machine_mode mode;
6667 rtx mem, ret;
6668 enum memmodel model;
6670 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6671 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6672 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6674 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6676 location_t loc
6677 = expansion_point_location_if_in_system_header (input_location);
6678 warning_at (loc, OPT_Winvalid_memory_model,
6679 "invalid memory model for %<__atomic_store%>");
6680 model = MEMMODEL_SEQ_CST;
6683 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6684 Failing that, a store is issued by __atomic_store. The only way this can
6685 fail is if the bool type is larger than a word size. Unlikely, but
6686 handle it anyway for completeness. Assume a single threaded model since
6687 there is no atomic support in this case, and no barriers are required. */
6688 ret = expand_atomic_store (mem, const0_rtx, model, true);
6689 if (!ret)
6690 emit_move_insn (mem, const0_rtx);
6691 return const0_rtx;
6694 /* Expand an atomic test_and_set operation.
6695 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6696 EXP is the call expression. */
6698 static rtx
6699 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6701 rtx mem;
6702 enum memmodel model;
6703 machine_mode mode;
6705 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6706 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6707 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6709 return expand_atomic_test_and_set (target, mem, model);
6713 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6714 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6716 static tree
6717 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6719 int size;
6720 machine_mode mode;
6721 unsigned int mode_align, type_align;
6723 if (TREE_CODE (arg0) != INTEGER_CST)
6724 return NULL_TREE;
6726 /* We need a corresponding integer mode for the access to be lock-free. */
6727 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6728 if (!int_mode_for_size (size, 0).exists (&mode))
6729 return boolean_false_node;
6731 mode_align = GET_MODE_ALIGNMENT (mode);
6733 if (TREE_CODE (arg1) == INTEGER_CST)
6735 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6737 /* Either this argument is null, or it's a fake pointer encoding
6738 the alignment of the object. */
6739 val = least_bit_hwi (val);
6740 val *= BITS_PER_UNIT;
6742 if (val == 0 || mode_align < val)
6743 type_align = mode_align;
6744 else
6745 type_align = val;
6747 else
6749 tree ttype = TREE_TYPE (arg1);
6751 /* This function is usually invoked and folded immediately by the front
6752 end before anything else has a chance to look at it. The pointer
6753 parameter at this point is usually cast to a void *, so check for that
6754 and look past the cast. */
6755 if (CONVERT_EXPR_P (arg1)
6756 && POINTER_TYPE_P (ttype)
6757 && VOID_TYPE_P (TREE_TYPE (ttype))
6758 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6759 arg1 = TREE_OPERAND (arg1, 0);
6761 ttype = TREE_TYPE (arg1);
6762 gcc_assert (POINTER_TYPE_P (ttype));
6764 /* Get the underlying type of the object. */
6765 ttype = TREE_TYPE (ttype);
6766 type_align = TYPE_ALIGN (ttype);
6769 /* If the object has smaller alignment, the lock free routines cannot
6770 be used. */
6771 if (type_align < mode_align)
6772 return boolean_false_node;
6774 /* Check if a compare_and_swap pattern exists for the mode which represents
6775 the required size. The pattern is not allowed to fail, so the existence
6776 of the pattern indicates support is present. Also require that an
6777 atomic load exists for the required size. */
6778 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6779 return boolean_true_node;
6780 else
6781 return boolean_false_node;
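/* Illustrative example (not from the GCC sources): on a typical target
   with lock-free 4-byte compare-and-swap and loads,

     __atomic_always_lock_free (4, 0)           // typical alignment: true
     __atomic_always_lock_free (4, (void *) 1)  // 1-byte aligned: false

   where a constant second argument is a fake pointer whose least
   significant set bit encodes the object's alignment.  */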
6784 /* Return true if the parameters to call EXP represent an object which will
6785 always generate lock free instructions. The first argument represents the
6786 size of the object, and the second parameter is a pointer to the object
6787 itself. If NULL is passed for the object, then the result is based on
6788 typical alignment for an object of the specified size. Otherwise return
6789 false. */
6791 static rtx
6792 expand_builtin_atomic_always_lock_free (tree exp)
6794 tree size;
6795 tree arg0 = CALL_EXPR_ARG (exp, 0);
6796 tree arg1 = CALL_EXPR_ARG (exp, 1);
6798 if (TREE_CODE (arg0) != INTEGER_CST)
6800 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6801 return const0_rtx;
6804 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6805 if (size == boolean_true_node)
6806 return const1_rtx;
6807 return const0_rtx;
6810 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6811 is lock free on this architecture. */
6813 static tree
6814 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6816 if (!flag_inline_atomics)
6817 return NULL_TREE;
6819 /* If it isn't always lock free, don't generate a result. */
6820 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6821 return boolean_true_node;
6823 return NULL_TREE;
6826 /* Return true if the parameters to call EXP represent an object which will
6827 always generate lock free instructions. The first argument represents the
6828 size of the object, and the second parameter is a pointer to the object
6829 itself. If NULL is passed for the object, then the result is based on
6830 typical alignment for an object of the specified size. Otherwise return
6831 NULL. */
6833 static rtx
6834 expand_builtin_atomic_is_lock_free (tree exp)
6836 tree size;
6837 tree arg0 = CALL_EXPR_ARG (exp, 0);
6838 tree arg1 = CALL_EXPR_ARG (exp, 1);
6840 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6842 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6843 return NULL_RTX;
6846 if (!flag_inline_atomics)
6847 return NULL_RTX;
6849 /* If the value is known at compile time, return the RTX for it. */
6850 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6851 if (size == boolean_true_node)
6852 return const1_rtx;
6854 return NULL_RTX;
6857 /* Expand the __atomic_thread_fence intrinsic:
6858 void __atomic_thread_fence (enum memmodel)
6859 EXP is the CALL_EXPR. */
6861 static void
6862 expand_builtin_atomic_thread_fence (tree exp)
6864 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6865 expand_mem_thread_fence (model);
6868 /* Expand the __atomic_signal_fence intrinsic:
6869 void __atomic_signal_fence (enum memmodel)
6870 EXP is the CALL_EXPR. */
6872 static void
6873 expand_builtin_atomic_signal_fence (tree exp)
6875 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6876 expand_mem_signal_fence (model);
6879 /* Expand the __sync_synchronize intrinsic. */
6881 static void
6882 expand_builtin_sync_synchronize (void)
6884 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6887 static rtx
6888 expand_builtin_thread_pointer (tree exp, rtx target)
6890 enum insn_code icode;
6891 if (!validate_arglist (exp, VOID_TYPE))
6892 return const0_rtx;
6893 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6894 if (icode != CODE_FOR_nothing)
6896 struct expand_operand op;
6897 /* If the target is not suitable then create a new target. */
6898 if (target == NULL_RTX
6899 || !REG_P (target)
6900 || GET_MODE (target) != Pmode)
6901 target = gen_reg_rtx (Pmode);
6902 create_output_operand (&op, target, Pmode);
6903 expand_insn (icode, 1, &op);
6904 return target;
6906 error ("%<__builtin_thread_pointer%> is not supported on this target");
6907 return const0_rtx;
6910 static void
6911 expand_builtin_set_thread_pointer (tree exp)
6913 enum insn_code icode;
6914 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6915 return;
6916 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6917 if (icode != CODE_FOR_nothing)
6919 struct expand_operand op;
6920 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6921 Pmode, EXPAND_NORMAL);
6922 create_input_operand (&op, val, Pmode);
6923 expand_insn (icode, 1, &op);
6924 return;
6926 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6930 /* Emit code to restore the current value of stack. */
6932 static void
6933 expand_stack_restore (tree var)
6935 rtx_insn *prev;
6936 rtx sa = expand_normal (var);
6938 sa = convert_memory_address (Pmode, sa);
6940 prev = get_last_insn ();
6941 emit_stack_restore (SAVE_BLOCK, sa);
6943 record_new_stack_level ();
6945 fixup_args_size_notes (prev, get_last_insn (), 0);
6948 /* Emit code to save the current value of stack. */
6950 static rtx
6951 expand_stack_save (void)
6953 rtx ret = NULL_RTX;
6955 emit_stack_save (SAVE_BLOCK, &ret);
6956 return ret;
6959 /* Emit code to get the openacc gang, worker or vector id or size. */
6961 static rtx
6962 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6964 const char *name;
6965 rtx fallback_retval;
6966 rtx_insn *(*gen_fn) (rtx, rtx);
6967 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6969 case BUILT_IN_GOACC_PARLEVEL_ID:
6970 name = "__builtin_goacc_parlevel_id";
6971 fallback_retval = const0_rtx;
6972 gen_fn = targetm.gen_oacc_dim_pos;
6973 break;
6974 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6975 name = "__builtin_goacc_parlevel_size";
6976 fallback_retval = const1_rtx;
6977 gen_fn = targetm.gen_oacc_dim_size;
6978 break;
6979 default:
6980 gcc_unreachable ();
6983 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6985 error ("%qs only supported in OpenACC code", name);
6986 return const0_rtx;
6989 tree arg = CALL_EXPR_ARG (exp, 0);
6990 if (TREE_CODE (arg) != INTEGER_CST)
6992 error ("non-constant argument 0 to %qs", name);
6993 return const0_rtx;
6996 int dim = TREE_INT_CST_LOW (arg);
6997 switch (dim)
6999 case GOMP_DIM_GANG:
7000 case GOMP_DIM_WORKER:
7001 case GOMP_DIM_VECTOR:
7002 break;
7003 default:
7004 error ("illegal argument 0 to %qs", name);
7005 return const0_rtx;
7008 if (ignore)
7009 return target;
7011 if (target == NULL_RTX)
7012 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7014 if (!targetm.have_oacc_dim_size ())
7016 emit_move_insn (target, fallback_retval);
7017 return target;
7020 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7021 emit_insn (gen_fn (reg, GEN_INT (dim)));
7022 if (reg != target)
7023 emit_move_insn (target, reg);
7025 return target;
7028 /* Expand a string compare operation using a sequence of char comparisons
7029 to get rid of the calling overhead, with the result going to TARGET if
7030 that's convenient.
7032 VAR_STR is the variable string source;
7033 CONST_STR is the constant string source;
7034 LENGTH is the number of chars to compare;
7035 CONST_STR_N indicates which source string is the constant string;
7036 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7038 The expansion is equivalent to (assuming const_str_n is 2, i.e., arg2 is the constant string):
7040 target = (int) (unsigned char) var_str[0]
7041 - (int) (unsigned char) const_str[0];
7042 if (target != 0)
7043 goto ne_label;
7045 target = (int) (unsigned char) var_str[length - 2]
7046 - (int) (unsigned char) const_str[length - 2];
7047 if (target != 0)
7048 goto ne_label;
7049 target = (int) (unsigned char) var_str[length - 1]
7050 - (int) (unsigned char) const_str[length - 1];
7051 ne_label:
7054 static rtx
7055 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7056 unsigned HOST_WIDE_INT length,
7057 int const_str_n, machine_mode mode)
7059 HOST_WIDE_INT offset = 0;
7060 rtx var_rtx_array
7061 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7062 rtx var_rtx = NULL_RTX;
7063 rtx const_rtx = NULL_RTX;
7064 rtx result = target ? target : gen_reg_rtx (mode);
7065 rtx_code_label *ne_label = gen_label_rtx ();
7066 tree unit_type_node = unsigned_char_type_node;
7067 scalar_int_mode unit_mode
7068 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7070 start_sequence ();
7072 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7074 var_rtx
7075 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7076 const_rtx = c_readstr (const_str + offset, unit_mode);
7077 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7078 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7080 op0 = convert_modes (mode, unit_mode, op0, 1);
7081 op1 = convert_modes (mode, unit_mode, op1, 1);
7082 result = expand_simple_binop (mode, MINUS, op0, op1,
7083 result, 1, OPTAB_WIDEN);
7084 if (i < length - 1)
7085 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7086 mode, true, ne_label);
7087 offset += GET_MODE_SIZE (unit_mode);
7090 emit_label (ne_label);
7091 rtx_insn *insns = get_insns ();
7092 end_sequence ();
7093 emit_insn (insns);
7095 return result;
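/* A concrete instance of the expansion above (an illustrative sketch):
   for strcmp (s, "ab"), LENGTH is 3 (two characters plus the
   terminating NUL) and CONST_STR_N is 2, so the loop emits roughly

     result = (int) (unsigned char) s[0] - (int) (unsigned char) 'a';
     if (result != 0) goto ne_label;
     result = (int) (unsigned char) s[1] - (int) (unsigned char) 'b';
     if (result != 0) goto ne_label;
     result = (int) (unsigned char) s[2] - (int) (unsigned char) '\0';
   ne_label:

   The last byte needs no branch: RESULT already holds the answer. */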
7098 /* Inline expansion of a call to str(n)cmp (or memcmp), with result going to
7099 TARGET if that's convenient.
7100 If the call is not inlined, return NULL_RTX. */
7101 static rtx
7102 inline_expand_builtin_string_cmp (tree exp, rtx target)
7104 tree fndecl = get_callee_fndecl (exp);
7105 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7106 unsigned HOST_WIDE_INT length = 0;
7107 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7109 /* Do NOT apply this inlining expansion when optimizing for size or
7110 when the optimization level is below 2. */
7111 if (optimize < 2 || optimize_insn_for_size_p ())
7112 return NULL_RTX;
7114 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7115 || fcode == BUILT_IN_STRNCMP
7116 || fcode == BUILT_IN_MEMCMP);
7118 /* On a target where the type of the call (int) has the same or narrower
7119 precision than unsigned char, give up on the inlining expansion. */
7120 if (TYPE_PRECISION (unsigned_char_type_node)
7121 >= TYPE_PRECISION (TREE_TYPE (exp)))
7122 return NULL_RTX;
7124 tree arg1 = CALL_EXPR_ARG (exp, 0);
7125 tree arg2 = CALL_EXPR_ARG (exp, 1);
7126 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7128 unsigned HOST_WIDE_INT len1 = 0;
7129 unsigned HOST_WIDE_INT len2 = 0;
7130 unsigned HOST_WIDE_INT len3 = 0;
7132 const char *src_str1 = c_getstr (arg1, &len1);
7133 const char *src_str2 = c_getstr (arg2, &len2);
7135 /* If neither string is a constant string, the call does not qualify. */
7136 if (!src_str1 && !src_str2)
7137 return NULL_RTX;
7139 /* For strncmp, if the length is not a constant, the call does not qualify. */
7140 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7141 return NULL_RTX;
7143 int const_str_n = 0;
7144 if (!len1)
7145 const_str_n = 2;
7146 else if (!len2)
7147 const_str_n = 1;
7148 else if (len2 > len1)
7149 const_str_n = 1;
7150 else
7151 const_str_n = 2;
7153 gcc_checking_assert (const_str_n > 0);
7154 length = (const_str_n == 1) ? len1 : len2;
7156 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7157 length = len3;
7159 /* If the length of the comparison is larger than the threshold,
7160 do nothing. */
7161 if (length > (unsigned HOST_WIDE_INT)
7162 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7163 return NULL_RTX;
7165 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7167 /* Now expand the call inline. */
7168 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7169 (const_str_n == 1) ? src_str1 : src_str2, length,
7170 const_str_n, mode);
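/* Usage sketch (illustrative): the inlining above fires only when one
   argument is a known constant string and, for strncmp, the bound is
   constant, e.g.

     strcmp (s, "hi");         // may qualify: length 3, if within threshold
     strncmp (s, "hello", 4);  // may qualify: constant bound 4
     strcmp (s, t);            // never qualifies: no constant string

   where the threshold is --param builtin-string-cmp-inline-length. */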
7173 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7174 represents the size of the first argument to that call, or VOIDmode
7175 if the argument is a pointer. IGNORE will be true if the result
7176 isn't used. */
7177 static rtx
7178 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7179 bool ignore)
7181 rtx val, failsafe;
7182 unsigned nargs = call_expr_nargs (exp);
7184 tree arg0 = CALL_EXPR_ARG (exp, 0);
7186 if (mode == VOIDmode)
7188 mode = TYPE_MODE (TREE_TYPE (arg0));
7189 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7192 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7194 /* An optional second argument can be used as a failsafe value on
7195 some machines. If it isn't present, then the failsafe value is
7196 assumed to be 0. */
7197 if (nargs > 1)
7199 tree arg1 = CALL_EXPR_ARG (exp, 1);
7200 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7202 else
7203 failsafe = const0_rtx;
7205 /* If the result isn't used, the behavior is undefined. It would be
7206 nice to emit a warning here, but path splitting means this might
7207 happen with legitimate code. So simply drop the builtin
7208 expansion in that case; we've handled any side-effects above. */
7209 if (ignore)
7210 return const0_rtx;
7212 /* If we don't have a suitable target, create one to hold the result. */
7213 if (target == NULL || GET_MODE (target) != mode)
7214 target = gen_reg_rtx (mode);
7216 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7217 val = convert_modes (mode, VOIDmode, val, false);
7219 return targetm.speculation_safe_value (mode, target, val, failsafe);
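/* Illustrative use (a sketch along the lines of the GCC documentation
   for __builtin_speculation_safe_value): the builtin yields a value
   that is safe to use even on a mis-speculated path, e.g. in a
   bounds-checked table lookup

     int
     load_array (unsigned untrusted_index)
     {
       if (untrusted_index < MAX_ARRAY_ELEMS)
         return array[__builtin_speculation_safe_value (untrusted_index)];
       return 0;
     }

   with array and MAX_ARRAY_ELEMS assumed declarations; targets lacking
   a speculation barrier may simply pass VAL through. */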
7222 /* Expand an expression EXP that calls a built-in function,
7223 with result going to TARGET if that's convenient
7224 (and in mode MODE if that's convenient).
7225 SUBTARGET may be used as the target for computing one of EXP's operands.
7226 IGNORE is nonzero if the value is to be ignored. */
7229 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7230 int ignore)
7232 tree fndecl = get_callee_fndecl (exp);
7233 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7234 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7235 int flags;
7237 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7238 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7240 /* When ASan is enabled, we don't want to expand some memory/string
7241 builtins; instead we rely on libsanitizer's hooks. This allows us to
7242 avoid redundant checks and to be sure that a possible overflow will be
7243 detected by ASan. */
7245 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7246 return expand_call (exp, target, ignore);
7248 /* When not optimizing, generate calls to library functions for a certain
7249 set of builtins. */
7250 if (!optimize
7251 && !called_as_built_in (fndecl)
7252 && fcode != BUILT_IN_FORK
7253 && fcode != BUILT_IN_EXECL
7254 && fcode != BUILT_IN_EXECV
7255 && fcode != BUILT_IN_EXECLP
7256 && fcode != BUILT_IN_EXECLE
7257 && fcode != BUILT_IN_EXECVP
7258 && fcode != BUILT_IN_EXECVE
7259 && !ALLOCA_FUNCTION_CODE_P (fcode)
7260 && fcode != BUILT_IN_FREE)
7261 return expand_call (exp, target, ignore);
7263 /* The built-in function expanders test for target == const0_rtx
7264 to determine whether the function's result will be ignored. */
7265 if (ignore)
7266 target = const0_rtx;
7268 /* If the result of a pure or const built-in function is ignored, and
7269 none of its arguments are volatile, we can avoid expanding the
7270 built-in call and just evaluate the arguments for side-effects. */
7271 if (target == const0_rtx
7272 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7273 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7275 bool volatilep = false;
7276 tree arg;
7277 call_expr_arg_iterator iter;
7279 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7280 if (TREE_THIS_VOLATILE (arg))
7282 volatilep = true;
7283 break;
7286 if (! volatilep)
7288 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7289 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7290 return const0_rtx;
7294 switch (fcode)
7296 CASE_FLT_FN (BUILT_IN_FABS):
7297 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7298 case BUILT_IN_FABSD32:
7299 case BUILT_IN_FABSD64:
7300 case BUILT_IN_FABSD128:
7301 target = expand_builtin_fabs (exp, target, subtarget);
7302 if (target)
7303 return target;
7304 break;
7306 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7307 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7308 target = expand_builtin_copysign (exp, target, subtarget);
7309 if (target)
7310 return target;
7311 break;
7313 /* Just do a normal library call if we were unable to fold
7314 the values. */
7315 CASE_FLT_FN (BUILT_IN_CABS):
7316 break;
7318 CASE_FLT_FN (BUILT_IN_FMA):
7319 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7320 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7321 if (target)
7322 return target;
7323 break;
7325 CASE_FLT_FN (BUILT_IN_ILOGB):
7326 if (! flag_unsafe_math_optimizations)
7327 break;
7328 gcc_fallthrough ();
7329 CASE_FLT_FN (BUILT_IN_ISINF):
7330 CASE_FLT_FN (BUILT_IN_FINITE):
7331 case BUILT_IN_ISFINITE:
7332 case BUILT_IN_ISNORMAL:
7333 target = expand_builtin_interclass_mathfn (exp, target);
7334 if (target)
7335 return target;
7336 break;
7338 CASE_FLT_FN (BUILT_IN_ICEIL):
7339 CASE_FLT_FN (BUILT_IN_LCEIL):
7340 CASE_FLT_FN (BUILT_IN_LLCEIL):
7341 CASE_FLT_FN (BUILT_IN_LFLOOR):
7342 CASE_FLT_FN (BUILT_IN_IFLOOR):
7343 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7344 target = expand_builtin_int_roundingfn (exp, target);
7345 if (target)
7346 return target;
7347 break;
7349 CASE_FLT_FN (BUILT_IN_IRINT):
7350 CASE_FLT_FN (BUILT_IN_LRINT):
7351 CASE_FLT_FN (BUILT_IN_LLRINT):
7352 CASE_FLT_FN (BUILT_IN_IROUND):
7353 CASE_FLT_FN (BUILT_IN_LROUND):
7354 CASE_FLT_FN (BUILT_IN_LLROUND):
7355 target = expand_builtin_int_roundingfn_2 (exp, target);
7356 if (target)
7357 return target;
7358 break;
7360 CASE_FLT_FN (BUILT_IN_POWI):
7361 target = expand_builtin_powi (exp, target);
7362 if (target)
7363 return target;
7364 break;
7366 CASE_FLT_FN (BUILT_IN_CEXPI):
7367 target = expand_builtin_cexpi (exp, target);
7368 gcc_assert (target);
7369 return target;
7371 CASE_FLT_FN (BUILT_IN_SIN):
7372 CASE_FLT_FN (BUILT_IN_COS):
7373 if (! flag_unsafe_math_optimizations)
7374 break;
7375 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7376 if (target)
7377 return target;
7378 break;
7380 CASE_FLT_FN (BUILT_IN_SINCOS):
7381 if (! flag_unsafe_math_optimizations)
7382 break;
7383 target = expand_builtin_sincos (exp);
7384 if (target)
7385 return target;
7386 break;
7388 case BUILT_IN_APPLY_ARGS:
7389 return expand_builtin_apply_args ();
7391 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7392 FUNCTION with a copy of the parameters described by
7393 ARGUMENTS and ARGSIZE. It returns a block of memory
7394 allocated on the stack into which is stored all the registers
7395 that might possibly be used for returning the result of a
7396 function. ARGUMENTS is the value returned by
7397 __builtin_apply_args. ARGSIZE is the number of bytes of
7398 arguments that must be copied. ??? How should this value be
7399 computed? We'll also need a safe worst case value for varargs
7400 functions. */
7401 case BUILT_IN_APPLY:
7402 if (!validate_arglist (exp, POINTER_TYPE,
7403 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7404 && !validate_arglist (exp, REFERENCE_TYPE,
7405 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7406 return const0_rtx;
7407 else
7409 rtx ops[3];
7411 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7412 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7413 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7415 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7418 /* __builtin_return (RESULT) causes the function to return the
7419 value described by RESULT. RESULT is address of the block of
7420 memory returned by __builtin_apply. */
7421 case BUILT_IN_RETURN:
7422 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7423 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7424 return const0_rtx;
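/* Illustrative sketch of how these three builtins combine (following
   the GCC documentation on constructing calls): an argument-forwarding
   wrapper can be written as

     void *
     wrapper (void)
     {
       void *args = __builtin_apply_args ();
       // 128 is a guessed worst-case size of the copied argument bytes.
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 128);
       __builtin_return (ret);
     }

   where target_fn is an assumed function taking the same arguments the
   wrapper is invoked with. */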
7426 case BUILT_IN_SAVEREGS:
7427 return expand_builtin_saveregs ();
7429 case BUILT_IN_VA_ARG_PACK:
7430 /* All valid uses of __builtin_va_arg_pack () are removed during
7431 inlining. */
7432 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7433 return const0_rtx;
7435 case BUILT_IN_VA_ARG_PACK_LEN:
7436 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7437 inlining. */
7438 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7439 return const0_rtx;
7441 /* Return the address of the first anonymous stack arg. */
7442 case BUILT_IN_NEXT_ARG:
7443 if (fold_builtin_next_arg (exp, false))
7444 return const0_rtx;
7445 return expand_builtin_next_arg ();
7447 case BUILT_IN_CLEAR_CACHE:
7448 target = expand_builtin___clear_cache (exp);
7449 if (target)
7450 return target;
7451 break;
7453 case BUILT_IN_CLASSIFY_TYPE:
7454 return expand_builtin_classify_type (exp);
7456 case BUILT_IN_CONSTANT_P:
7457 return const0_rtx;
7459 case BUILT_IN_FRAME_ADDRESS:
7460 case BUILT_IN_RETURN_ADDRESS:
7461 return expand_builtin_frame_address (fndecl, exp);
7463 /* Returns the address of the area where the structure is returned.
7464 0 otherwise. */
7465 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7466 if (call_expr_nargs (exp) != 0
7467 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7468 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7469 return const0_rtx;
7470 else
7471 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7473 CASE_BUILT_IN_ALLOCA:
7474 target = expand_builtin_alloca (exp);
7475 if (target)
7476 return target;
7477 break;
7479 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7480 return expand_asan_emit_allocas_unpoison (exp);
7482 case BUILT_IN_STACK_SAVE:
7483 return expand_stack_save ();
7485 case BUILT_IN_STACK_RESTORE:
7486 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7487 return const0_rtx;
7489 case BUILT_IN_BSWAP16:
7490 case BUILT_IN_BSWAP32:
7491 case BUILT_IN_BSWAP64:
7492 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7493 if (target)
7494 return target;
7495 break;
7497 CASE_INT_FN (BUILT_IN_FFS):
7498 target = expand_builtin_unop (target_mode, exp, target,
7499 subtarget, ffs_optab);
7500 if (target)
7501 return target;
7502 break;
7504 CASE_INT_FN (BUILT_IN_CLZ):
7505 target = expand_builtin_unop (target_mode, exp, target,
7506 subtarget, clz_optab);
7507 if (target)
7508 return target;
7509 break;
7511 CASE_INT_FN (BUILT_IN_CTZ):
7512 target = expand_builtin_unop (target_mode, exp, target,
7513 subtarget, ctz_optab);
7514 if (target)
7515 return target;
7516 break;
7518 CASE_INT_FN (BUILT_IN_CLRSB):
7519 target = expand_builtin_unop (target_mode, exp, target,
7520 subtarget, clrsb_optab);
7521 if (target)
7522 return target;
7523 break;
7525 CASE_INT_FN (BUILT_IN_POPCOUNT):
7526 target = expand_builtin_unop (target_mode, exp, target,
7527 subtarget, popcount_optab);
7528 if (target)
7529 return target;
7530 break;
7532 CASE_INT_FN (BUILT_IN_PARITY):
7533 target = expand_builtin_unop (target_mode, exp, target,
7534 subtarget, parity_optab);
7535 if (target)
7536 return target;
7537 break;
7539 case BUILT_IN_STRLEN:
7540 target = expand_builtin_strlen (exp, target, target_mode);
7541 if (target)
7542 return target;
7543 break;
7545 case BUILT_IN_STRNLEN:
7546 target = expand_builtin_strnlen (exp, target, target_mode);
7547 if (target)
7548 return target;
7549 break;
7551 case BUILT_IN_STRCAT:
7552 target = expand_builtin_strcat (exp, target);
7553 if (target)
7554 return target;
7555 break;
7557 case BUILT_IN_STRCPY:
7558 target = expand_builtin_strcpy (exp, target);
7559 if (target)
7560 return target;
7561 break;
7563 case BUILT_IN_STRNCAT:
7564 target = expand_builtin_strncat (exp, target);
7565 if (target)
7566 return target;
7567 break;
7569 case BUILT_IN_STRNCPY:
7570 target = expand_builtin_strncpy (exp, target);
7571 if (target)
7572 return target;
7573 break;
7575 case BUILT_IN_STPCPY:
7576 target = expand_builtin_stpcpy (exp, target, mode);
7577 if (target)
7578 return target;
7579 break;
7581 case BUILT_IN_STPNCPY:
7582 target = expand_builtin_stpncpy (exp, target);
7583 if (target)
7584 return target;
7585 break;
7587 case BUILT_IN_MEMCHR:
7588 target = expand_builtin_memchr (exp, target);
7589 if (target)
7590 return target;
7591 break;
7593 case BUILT_IN_MEMCPY:
7594 target = expand_builtin_memcpy (exp, target);
7595 if (target)
7596 return target;
7597 break;
7599 case BUILT_IN_MEMMOVE:
7600 target = expand_builtin_memmove (exp, target);
7601 if (target)
7602 return target;
7603 break;
7605 case BUILT_IN_MEMPCPY:
7606 target = expand_builtin_mempcpy (exp, target);
7607 if (target)
7608 return target;
7609 break;
7611 case BUILT_IN_MEMSET:
7612 target = expand_builtin_memset (exp, target, mode);
7613 if (target)
7614 return target;
7615 break;
7617 case BUILT_IN_BZERO:
7618 target = expand_builtin_bzero (exp);
7619 if (target)
7620 return target;
7621 break;
7623 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7624 back to a BUILT_IN_STRCMP. Remember to delete the third parameter
7625 when changing it to a strcmp call. */
7626 case BUILT_IN_STRCMP_EQ:
7627 target = expand_builtin_memcmp (exp, target, true);
7628 if (target)
7629 return target;
7631 /* Change this call back to a BUILT_IN_STRCMP. */
7632 TREE_OPERAND (exp, 1)
7633 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7635 /* Delete the last parameter. */
7636 unsigned int i;
7637 vec<tree, va_gc> *arg_vec;
7638 vec_alloc (arg_vec, 2);
7639 for (i = 0; i < 2; i++)
7640 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7641 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7642 /* FALLTHROUGH */
7644 case BUILT_IN_STRCMP:
7645 target = expand_builtin_strcmp (exp, target);
7646 if (target)
7647 return target;
7648 break;
7650 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7651 back to a BUILT_IN_STRNCMP. */
7652 case BUILT_IN_STRNCMP_EQ:
7653 target = expand_builtin_memcmp (exp, target, true);
7654 if (target)
7655 return target;
7657 /* Change it back to a BUILT_IN_STRNCMP. */
7658 TREE_OPERAND (exp, 1)
7659 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7660 /* FALLTHROUGH */
7662 case BUILT_IN_STRNCMP:
7663 target = expand_builtin_strncmp (exp, target, mode);
7664 if (target)
7665 return target;
7666 break;
7668 case BUILT_IN_BCMP:
7669 case BUILT_IN_MEMCMP:
7670 case BUILT_IN_MEMCMP_EQ:
7671 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7672 if (target)
7673 return target;
7674 if (fcode == BUILT_IN_MEMCMP_EQ)
7676 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7677 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7679 break;
7681 case BUILT_IN_SETJMP:
7682 /* This should have been lowered to the builtins below. */
7683 gcc_unreachable ();
7685 case BUILT_IN_SETJMP_SETUP:
7686 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7687 and the receiver label. */
7688 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7690 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7691 VOIDmode, EXPAND_NORMAL);
7692 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7693 rtx_insn *label_r = label_rtx (label);
7695 /* This is copied from the handling of non-local gotos. */
7696 expand_builtin_setjmp_setup (buf_addr, label_r);
7697 nonlocal_goto_handler_labels
7698 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7699 nonlocal_goto_handler_labels);
7700 /* ??? Do not let expand_label treat us as such since we would
7701 not want to be both on the list of non-local labels and on
7702 the list of forced labels. */
7703 FORCED_LABEL (label) = 0;
7704 return const0_rtx;
7706 break;
7708 case BUILT_IN_SETJMP_RECEIVER:
7709 /* __builtin_setjmp_receiver is passed the receiver label. */
7710 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7712 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7713 rtx_insn *label_r = label_rtx (label);
7715 expand_builtin_setjmp_receiver (label_r);
7716 return const0_rtx;
7718 break;
7720 /* __builtin_longjmp is passed a pointer to an array of five words.
7721 It's similar to the C library longjmp function but works with
7722 __builtin_setjmp above. */
7723 case BUILT_IN_LONGJMP:
7724 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7726 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7727 VOIDmode, EXPAND_NORMAL);
7728 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7730 if (value != const1_rtx)
7732 error ("%<__builtin_longjmp%> second argument must be 1");
7733 return const0_rtx;
7736 expand_builtin_longjmp (buf_addr, value);
7737 return const0_rtx;
7739 break;
7741 case BUILT_IN_NONLOCAL_GOTO:
7742 target = expand_builtin_nonlocal_goto (exp);
7743 if (target)
7744 return target;
7745 break;
7747 /* This updates the setjmp buffer that is its argument with the value
7748 of the current stack pointer. */
7749 case BUILT_IN_UPDATE_SETJMP_BUF:
7750 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7752 rtx buf_addr
7753 = expand_normal (CALL_EXPR_ARG (exp, 0));
7755 expand_builtin_update_setjmp_buf (buf_addr);
7756 return const0_rtx;
7758 break;
7760 case BUILT_IN_TRAP:
7761 expand_builtin_trap ();
7762 return const0_rtx;
7764 case BUILT_IN_UNREACHABLE:
7765 expand_builtin_unreachable ();
7766 return const0_rtx;
7768 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7769 case BUILT_IN_SIGNBITD32:
7770 case BUILT_IN_SIGNBITD64:
7771 case BUILT_IN_SIGNBITD128:
7772 target = expand_builtin_signbit (exp, target);
7773 if (target)
7774 return target;
7775 break;
7777 /* Various hooks for the DWARF 2 __throw routine. */
7778 case BUILT_IN_UNWIND_INIT:
7779 expand_builtin_unwind_init ();
7780 return const0_rtx;
7781 case BUILT_IN_DWARF_CFA:
7782 return virtual_cfa_rtx;
7783 #ifdef DWARF2_UNWIND_INFO
7784 case BUILT_IN_DWARF_SP_COLUMN:
7785 return expand_builtin_dwarf_sp_column ();
7786 case BUILT_IN_INIT_DWARF_REG_SIZES:
7787 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7788 return const0_rtx;
7789 #endif
7790 case BUILT_IN_FROB_RETURN_ADDR:
7791 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7792 case BUILT_IN_EXTRACT_RETURN_ADDR:
7793 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7794 case BUILT_IN_EH_RETURN:
7795 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7796 CALL_EXPR_ARG (exp, 1));
7797 return const0_rtx;
7798 case BUILT_IN_EH_RETURN_DATA_REGNO:
7799 return expand_builtin_eh_return_data_regno (exp);
7800 case BUILT_IN_EXTEND_POINTER:
7801 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7802 case BUILT_IN_EH_POINTER:
7803 return expand_builtin_eh_pointer (exp);
7804 case BUILT_IN_EH_FILTER:
7805 return expand_builtin_eh_filter (exp);
7806 case BUILT_IN_EH_COPY_VALUES:
7807 return expand_builtin_eh_copy_values (exp);
7809 case BUILT_IN_VA_START:
7810 return expand_builtin_va_start (exp);
7811 case BUILT_IN_VA_END:
7812 return expand_builtin_va_end (exp);
7813 case BUILT_IN_VA_COPY:
7814 return expand_builtin_va_copy (exp);
7815 case BUILT_IN_EXPECT:
7816 return expand_builtin_expect (exp, target);
7817 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7818 return expand_builtin_expect_with_probability (exp, target);
7819 case BUILT_IN_ASSUME_ALIGNED:
7820 return expand_builtin_assume_aligned (exp, target);
7821 case BUILT_IN_PREFETCH:
7822 expand_builtin_prefetch (exp);
7823 return const0_rtx;
7825 case BUILT_IN_INIT_TRAMPOLINE:
7826 return expand_builtin_init_trampoline (exp, true);
7827 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7828 return expand_builtin_init_trampoline (exp, false);
7829 case BUILT_IN_ADJUST_TRAMPOLINE:
7830 return expand_builtin_adjust_trampoline (exp);
7832 case BUILT_IN_INIT_DESCRIPTOR:
7833 return expand_builtin_init_descriptor (exp);
7834 case BUILT_IN_ADJUST_DESCRIPTOR:
7835 return expand_builtin_adjust_descriptor (exp);
7837 case BUILT_IN_FORK:
7838 case BUILT_IN_EXECL:
7839 case BUILT_IN_EXECV:
7840 case BUILT_IN_EXECLP:
7841 case BUILT_IN_EXECLE:
7842 case BUILT_IN_EXECVP:
7843 case BUILT_IN_EXECVE:
7844 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7845 if (target)
7846 return target;
7847 break;
7849 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7850 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7851 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7852 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7853 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7854 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7855 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7856 if (target)
7857 return target;
7858 break;
7860 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7861 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7862 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7863 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7864 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7865 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7866 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7867 if (target)
7868 return target;
7869 break;
7871 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7872 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7873 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7874 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7875 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7876 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7877 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7878 if (target)
7879 return target;
7880 break;
7882 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7883 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7884 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7885 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7886 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7887 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7888 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7889 if (target)
7890 return target;
7891 break;
7893 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7894 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7895 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7896 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7897 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7898 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7899 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7900 if (target)
7901 return target;
7902 break;
7904 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7905 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7906 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7907 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7908 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7909 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7910 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7911 if (target)
7912 return target;
7913 break;
7915 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7916 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7917 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7918 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7919 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7920 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7921 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7922 if (target)
7923 return target;
7924 break;
7926 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7927 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7928 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7929 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7930 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7931 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7932 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7933 if (target)
7934 return target;
7935 break;
7937 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7938 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7939 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7940 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7941 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7942 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7943 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7944 if (target)
7945 return target;
7946 break;
7948 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7949 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7950 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7951 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7952 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7953 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7954 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7955 if (target)
7956 return target;
7957 break;
7959 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7960 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7961 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7962 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7963 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7964 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7965 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7966 if (target)
7967 return target;
7968 break;
7970 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7971 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7972 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7973 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7974 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7975 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7976 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7977 if (target)
7978 return target;
7979 break;
7981 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7982 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7983 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7984 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7985 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7986 if (mode == VOIDmode)
7987 mode = TYPE_MODE (boolean_type_node);
7988 if (!target || !register_operand (target, mode))
7989 target = gen_reg_rtx (mode);
7991 mode = get_builtin_sync_mode
7992 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7993 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7994 if (target)
7995 return target;
7996 break;
7998 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7999 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8000 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8001 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8002 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8003 mode = get_builtin_sync_mode
8004 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8005 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8006 if (target)
8007 return target;
8008 break;
8010 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8011 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8012 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8013 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8014 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8015 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8016 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8017 if (target)
8018 return target;
8019 break;
8021 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8022 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8023 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8024 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8025 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8026 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8027 expand_builtin_sync_lock_release (mode, exp);
8028 return const0_rtx;
8030 case BUILT_IN_SYNC_SYNCHRONIZE:
8031 expand_builtin_sync_synchronize ();
8032 return const0_rtx;
8034 case BUILT_IN_ATOMIC_EXCHANGE_1:
8035 case BUILT_IN_ATOMIC_EXCHANGE_2:
8036 case BUILT_IN_ATOMIC_EXCHANGE_4:
8037 case BUILT_IN_ATOMIC_EXCHANGE_8:
8038 case BUILT_IN_ATOMIC_EXCHANGE_16:
8039 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8040 target = expand_builtin_atomic_exchange (mode, exp, target);
8041 if (target)
8042 return target;
8043 break;
8045 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8046 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8047 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8048 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8049 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8051 unsigned int nargs, z;
8052 vec<tree, va_gc> *vec;
8054 mode =
8055 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8056 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8057 if (target)
8058 return target;
8060 /* If this is turned into an external library call, the weak parameter
8061 must be dropped to match the expected parameter list. */
8062 nargs = call_expr_nargs (exp);
8063 vec_alloc (vec, nargs - 1);
8064 for (z = 0; z < 3; z++)
8065 vec->quick_push (CALL_EXPR_ARG (exp, z));
8066 /* Skip the boolean weak parameter. */
8067 for (z = 4; z < 6; z++)
8068 vec->quick_push (CALL_EXPR_ARG (exp, z));
8069 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8070 break;
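/* Sketch of the argument mapping assumed above (illustrative): the
   builtin form carries six arguments,

     __atomic_compare_exchange_n (ptr, &expected, desired,
                                  weak, success_order, failure_order);

   while the external library entry point takes five (no WEAK), so
   arguments 0-2 and 4-5 are kept and argument 3 is dropped before
   falling through to the library call. */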
8073 case BUILT_IN_ATOMIC_LOAD_1:
8074 case BUILT_IN_ATOMIC_LOAD_2:
8075 case BUILT_IN_ATOMIC_LOAD_4:
8076 case BUILT_IN_ATOMIC_LOAD_8:
8077 case BUILT_IN_ATOMIC_LOAD_16:
8078 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8079 target = expand_builtin_atomic_load (mode, exp, target);
8080 if (target)
8081 return target;
8082 break;
8084 case BUILT_IN_ATOMIC_STORE_1:
8085 case BUILT_IN_ATOMIC_STORE_2:
8086 case BUILT_IN_ATOMIC_STORE_4:
8087 case BUILT_IN_ATOMIC_STORE_8:
8088 case BUILT_IN_ATOMIC_STORE_16:
8089 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8090 target = expand_builtin_atomic_store (mode, exp);
8091 if (target)
8092 return const0_rtx;
8093 break;
8095 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8096 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8097 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8098 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8099 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8101 enum built_in_function lib;
8102 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8103 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8104 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8105 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8106 ignore, lib);
8107 if (target)
8108 return target;
8109 break;
8111 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8112 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8113 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8114 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8115 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8117 enum built_in_function lib;
8118 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8119 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8120 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8121 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8122 ignore, lib);
8123 if (target)
8124 return target;
8125 break;
8127 case BUILT_IN_ATOMIC_AND_FETCH_1:
8128 case BUILT_IN_ATOMIC_AND_FETCH_2:
8129 case BUILT_IN_ATOMIC_AND_FETCH_4:
8130 case BUILT_IN_ATOMIC_AND_FETCH_8:
8131 case BUILT_IN_ATOMIC_AND_FETCH_16:
8133 enum built_in_function lib;
8134 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8135 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8136 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8137 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8138 ignore, lib);
8139 if (target)
8140 return target;
8141 break;
8143 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8144 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8145 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8146 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8147 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8149 enum built_in_function lib;
8150 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8151 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8152 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8153 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8154 ignore, lib);
8155 if (target)
8156 return target;
8157 break;
8159 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8160 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8161 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8162 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8163 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8165 enum built_in_function lib;
8166 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8167 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8168 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8169 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8170 ignore, lib);
8171 if (target)
8172 return target;
8173 break;
8175 case BUILT_IN_ATOMIC_OR_FETCH_1:
8176 case BUILT_IN_ATOMIC_OR_FETCH_2:
8177 case BUILT_IN_ATOMIC_OR_FETCH_4:
8178 case BUILT_IN_ATOMIC_OR_FETCH_8:
8179 case BUILT_IN_ATOMIC_OR_FETCH_16:
8181 enum built_in_function lib;
8182 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8183 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8184 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8185 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8186 ignore, lib);
8187 if (target)
8188 return target;
8189 break;
8191 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8192 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8193 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8194 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8195 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8196 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8197 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8198 ignore, BUILT_IN_NONE);
8199 if (target)
8200 return target;
8201 break;
8203 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8204 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8205 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8206 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8207 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8208 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8209 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8210 ignore, BUILT_IN_NONE);
8211 if (target)
8212 return target;
8213 break;
8215 case BUILT_IN_ATOMIC_FETCH_AND_1:
8216 case BUILT_IN_ATOMIC_FETCH_AND_2:
8217 case BUILT_IN_ATOMIC_FETCH_AND_4:
8218 case BUILT_IN_ATOMIC_FETCH_AND_8:
8219 case BUILT_IN_ATOMIC_FETCH_AND_16:
8220 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8221 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8222 ignore, BUILT_IN_NONE);
8223 if (target)
8224 return target;
8225 break;
8227 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8228 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8229 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8230 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8231 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8232 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8233 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8234 ignore, BUILT_IN_NONE);
8235 if (target)
8236 return target;
8237 break;
8239 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8240 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8241 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8242 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8243 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8244 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8245 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8246 ignore, BUILT_IN_NONE);
8247 if (target)
8248 return target;
8249 break;
8251 case BUILT_IN_ATOMIC_FETCH_OR_1:
8252 case BUILT_IN_ATOMIC_FETCH_OR_2:
8253 case BUILT_IN_ATOMIC_FETCH_OR_4:
8254 case BUILT_IN_ATOMIC_FETCH_OR_8:
8255 case BUILT_IN_ATOMIC_FETCH_OR_16:
8256 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8257 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8258 ignore, BUILT_IN_NONE);
8259 if (target)
8260 return target;
8261 break;
8263 case BUILT_IN_ATOMIC_TEST_AND_SET:
8264 return expand_builtin_atomic_test_and_set (exp, target);
8266 case BUILT_IN_ATOMIC_CLEAR:
8267 return expand_builtin_atomic_clear (exp);
8269 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8270 return expand_builtin_atomic_always_lock_free (exp);
8272 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8273 target = expand_builtin_atomic_is_lock_free (exp);
8274 if (target)
8275 return target;
8276 break;
8278 case BUILT_IN_ATOMIC_THREAD_FENCE:
8279 expand_builtin_atomic_thread_fence (exp);
8280 return const0_rtx;
8282 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8283 expand_builtin_atomic_signal_fence (exp);
8284 return const0_rtx;
8286 case BUILT_IN_OBJECT_SIZE:
8287 return expand_builtin_object_size (exp);
8289 case BUILT_IN_MEMCPY_CHK:
8290 case BUILT_IN_MEMPCPY_CHK:
8291 case BUILT_IN_MEMMOVE_CHK:
8292 case BUILT_IN_MEMSET_CHK:
8293 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8294 if (target)
8295 return target;
8296 break;
8298 case BUILT_IN_STRCPY_CHK:
8299 case BUILT_IN_STPCPY_CHK:
8300 case BUILT_IN_STRNCPY_CHK:
8301 case BUILT_IN_STPNCPY_CHK:
8302 case BUILT_IN_STRCAT_CHK:
8303 case BUILT_IN_STRNCAT_CHK:
8304 case BUILT_IN_SNPRINTF_CHK:
8305 case BUILT_IN_VSNPRINTF_CHK:
8306 maybe_emit_chk_warning (exp, fcode);
8307 break;
8309 case BUILT_IN_SPRINTF_CHK:
8310 case BUILT_IN_VSPRINTF_CHK:
8311 maybe_emit_sprintf_chk_warning (exp, fcode);
8312 break;
8314 case BUILT_IN_FREE:
8315 if (warn_free_nonheap_object)
8316 maybe_emit_free_warning (exp);
8317 break;
8319 case BUILT_IN_THREAD_POINTER:
8320 return expand_builtin_thread_pointer (exp, target);
8322 case BUILT_IN_SET_THREAD_POINTER:
8323 expand_builtin_set_thread_pointer (exp);
8324 return const0_rtx;
8326 case BUILT_IN_ACC_ON_DEVICE:
8327 /* Do a library call if we failed to expand the builtin when
8328 folding. */
8329 break;
8331 case BUILT_IN_GOACC_PARLEVEL_ID:
8332 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8333 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8335 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8336 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8338 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8339 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8340 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8341 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8342 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8343 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8344 return expand_speculation_safe_value (mode, exp, target, ignore);
8346 default: /* Just do a library call for an unknown builtin. */
8347 break;
8350 /* The switch statement above can drop through to cause the function
8351 to be called normally. */
8352 return expand_call (exp, target, ignore);
8355 /* Determine whether a tree node represents a call to a built-in
8356 function. If the tree T is a call to a built-in function with
8357 the right number of arguments of the appropriate types, return
8358 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8359 Otherwise the return value is END_BUILTINS. */
8361 enum built_in_function
8362 builtin_mathfn_code (const_tree t)
8364 const_tree fndecl, arg, parmlist;
8365 const_tree argtype, parmtype;
8366 const_call_expr_arg_iterator iter;
8368 if (TREE_CODE (t) != CALL_EXPR)
8369 return END_BUILTINS;
8371 fndecl = get_callee_fndecl (t);
8372 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8373 return END_BUILTINS;
8375 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8376 init_const_call_expr_arg_iterator (t, &iter);
8377 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8379 /* If a function doesn't take a variable number of arguments,
8380 the last element in the list will have type `void'. */
8381 parmtype = TREE_VALUE (parmlist);
8382 if (VOID_TYPE_P (parmtype))
8384 if (more_const_call_expr_args_p (&iter))
8385 return END_BUILTINS;
8386 return DECL_FUNCTION_CODE (fndecl);
8389 if (! more_const_call_expr_args_p (&iter))
8390 return END_BUILTINS;
8392 arg = next_const_call_expr_arg (&iter);
8393 argtype = TREE_TYPE (arg);
8395 if (SCALAR_FLOAT_TYPE_P (parmtype))
8397 if (! SCALAR_FLOAT_TYPE_P (argtype))
8398 return END_BUILTINS;
8400 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8402 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8403 return END_BUILTINS;
8405 else if (POINTER_TYPE_P (parmtype))
8407 if (! POINTER_TYPE_P (argtype))
8408 return END_BUILTINS;
8410 else if (INTEGRAL_TYPE_P (parmtype))
8412 if (! INTEGRAL_TYPE_P (argtype))
8413 return END_BUILTINS;
8415 else
8416 return END_BUILTINS;
8419 /* Variable-length argument list. */
8420 return DECL_FUNCTION_CODE (fndecl);
8423 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8424 evaluate to a constant. */
8426 static tree
8427 fold_builtin_constant_p (tree arg)
8429 /* We return 1 for a numeric type that's known to be a constant
8430 value at compile-time or for an aggregate type that's a
8431 literal constant. */
8432 STRIP_NOPS (arg);
8434 /* If we know this is a constant, return the constant 1. */
8435 if (CONSTANT_CLASS_P (arg)
8436 || (TREE_CODE (arg) == CONSTRUCTOR
8437 && TREE_CONSTANT (arg)))
8438 return integer_one_node;
8439 if (TREE_CODE (arg) == ADDR_EXPR)
8441 tree op = TREE_OPERAND (arg, 0);
8442 if (TREE_CODE (op) == STRING_CST
8443 || (TREE_CODE (op) == ARRAY_REF
8444 && integer_zerop (TREE_OPERAND (op, 1))
8445 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8446 return integer_one_node;
8449 /* If this expression has side effects, show we don't know it to be a
8450 constant. Likewise if it's a pointer or aggregate type since in
8451 those cases we only want literals, as those are only optimized
8452 when generating RTL, not later.
8453 And finally, if we are compiling an initializer, not code, we
8454 need to return a definite result now; there's not going to be any
8455 more optimization done. */
8456 if (TREE_SIDE_EFFECTS (arg)
8457 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8458 || POINTER_TYPE_P (TREE_TYPE (arg))
8459 || cfun == 0
8460 || folding_initializer
8461 || force_folding_builtin_constant_p)
8462 return integer_zero_node;
8464 return NULL_TREE;
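/* Worked examples of the rules above (illustrative, with PTR an
   assumed pointer variable and I a non-constant int):

     __builtin_constant_p (42)     -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1  (address of a STRING_CST)
     __builtin_constant_p (ptr)    -> 0  (pointer type: literals only)
     __builtin_constant_p (i + 1)  -> NULL_TREE here; later folding may
                                      still resolve it. */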
8467 /* Create builtin_expect or builtin_expect_with_probability
8468 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8469 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8470 argument; builtin_expect_with_probability instead uses the third argument
8471 as the PROBABILITY value. */
8473 static tree
8474 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8475 tree predictor, tree probability)
8477 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8479 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8480 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8481 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8482 ret_type = TREE_TYPE (TREE_TYPE (fn));
8483 pred_type = TREE_VALUE (arg_types);
8484 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8486 pred = fold_convert_loc (loc, pred_type, pred);
8487 expected = fold_convert_loc (loc, expected_type, expected);
8489 if (probability)
8490 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8491 else
8492 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8493 predictor);
8495 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8496 build_int_cst (ret_type, 0));
8499 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8500 NULL_TREE if no simplification is possible. */
8502 tree
8503 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8504 tree arg3)
8506 tree inner, fndecl, inner_arg0;
8507 enum tree_code code;
8509 /* Distribute the expected value over short-circuiting operators.
8510 See through the cast from truthvalue_type_node to long. */
8511 inner_arg0 = arg0;
8512 while (CONVERT_EXPR_P (inner_arg0)
8513 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8514 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8515 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8517 /* If this is a builtin_expect within a builtin_expect keep the
8518 inner one. See through a comparison against a constant. It
8519 might have been added to create a truthvalue. */
8520 inner = inner_arg0;
8522 if (COMPARISON_CLASS_P (inner)
8523 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8524 inner = TREE_OPERAND (inner, 0);
8526 if (TREE_CODE (inner) == CALL_EXPR
8527 && (fndecl = get_callee_fndecl (inner))
8528 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8529 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8530 return arg0;
8532 inner = inner_arg0;
8533 code = TREE_CODE (inner);
8534 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8536 tree op0 = TREE_OPERAND (inner, 0);
8537 tree op1 = TREE_OPERAND (inner, 1);
8538 arg1 = save_expr (arg1);
8540 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8541 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8542 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8544 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8547 /* If the argument isn't invariant then there's nothing else we can do. */
8548 if (!TREE_CONSTANT (inner_arg0))
8549 return NULL_TREE;
8551 /* If we expect that a comparison against the argument will fold to
8552 a constant return the constant. In practice, this means a true
8553 constant or the address of a non-weak symbol. */
8554 inner = inner_arg0;
8555 STRIP_NOPS (inner);
8556 if (TREE_CODE (inner) == ADDR_EXPR)
8560 inner = TREE_OPERAND (inner, 0);
8562 while (TREE_CODE (inner) == COMPONENT_REF
8563 || TREE_CODE (inner) == ARRAY_REF);
8564 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8565 return NULL_TREE;
8568 /* Otherwise, ARG0 already has the proper type for the return value. */
8569 return arg0;
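/* Worked example (illustrative): for

     long t = __builtin_expect (a && b, 1);

   the distribution step above produces, roughly,

     long t = (long) (__builtin_expect (a, 1) != 0
                      && __builtin_expect (b, 1) != 0);

   so each half of the short-circuit carries its own prediction. */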
8572 /* Fold a call to __builtin_classify_type with argument ARG. */
8574 static tree
8575 fold_builtin_classify_type (tree arg)
8577 if (arg == 0)
8578 return build_int_cst (integer_type_node, no_type_class);
8580 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8583 /* Fold a call to __builtin_strlen with argument ARG. */
8585 static tree
8586 fold_builtin_strlen (location_t loc, tree type, tree arg)
8588 if (!validate_arg (arg, POINTER_TYPE))
8589 return NULL_TREE;
8590 else
8592 c_strlen_data lendata = { };
8593 tree len = c_strlen (arg, 0, &lendata);
8595 if (len)
8596 return fold_convert_loc (loc, type, len);
8598 if (!lendata.decl)
8599 c_strlen (arg, 1, &lendata);
8601 if (lendata.decl)
8603 if (EXPR_HAS_LOCATION (arg))
8604 loc = EXPR_LOCATION (arg);
8605 else if (loc == UNKNOWN_LOCATION)
8606 loc = input_location;
8607 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8610 return NULL_TREE;
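/* Example (illustrative):

     size_t n = strlen ("hello");   // folds to n = 5

   whereas strlen of an unterminated constant char array triggers the
   warn_string_no_nul diagnostic above and is left unfolded. */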
8614 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8616 static tree
8617 fold_builtin_inf (location_t loc, tree type, int warn)
8619 REAL_VALUE_TYPE real;
8621 /* __builtin_inff is intended to be usable to define INFINITY on all
8622 targets. If an infinity is not available, INFINITY expands "to a
8623 positive constant of type float that overflows at translation
8624 time", footnote "In this case, using INFINITY will violate the
8625 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8626 Thus we pedwarn to ensure this constraint violation is
8627 diagnosed. */
8628 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8629 pedwarn (loc, 0, "target format does not support infinity");
8631 real_inf (&real);
8632 return build_real (type, real);
8635 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8636 NULL_TREE if no simplification can be made. */
8638 static tree
8639 fold_builtin_sincos (location_t loc,
8640 tree arg0, tree arg1, tree arg2)
8642 tree type;
8643 tree fndecl, call = NULL_TREE;
8645 if (!validate_arg (arg0, REAL_TYPE)
8646 || !validate_arg (arg1, POINTER_TYPE)
8647 || !validate_arg (arg2, POINTER_TYPE))
8648 return NULL_TREE;
8650 type = TREE_TYPE (arg0);
8652 /* Canonicalize sincos to cexpi. */
8653 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8654 if (fn == END_BUILTINS)
8655 return NULL_TREE;
8657 /* Calculate the result when the argument is a constant. */
8658 if (TREE_CODE (arg0) == REAL_CST)
8660 tree complex_type = build_complex_type (type);
8661 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8663 if (!call)
8665 if (!targetm.libc_has_function (function_c99_math_complex)
8666 || !builtin_decl_implicit_p (fn))
8667 return NULL_TREE;
8668 fndecl = builtin_decl_explicit (fn);
8669 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8670 call = builtin_save_expr (call);
8673 tree ptype = build_pointer_type (type);
8674 arg1 = fold_convert (ptype, arg1);
8675 arg2 = fold_convert (ptype, arg2);
8676 return build2 (COMPOUND_EXPR, void_type_node,
8677 build2 (MODIFY_EXPR, void_type_node,
8678 build_fold_indirect_ref_loc (loc, arg1),
8679 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8680 build2 (MODIFY_EXPR, void_type_node,
8681 build_fold_indirect_ref_loc (loc, arg2),
8682 fold_build1_loc (loc, REALPART_EXPR, type, call)));
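/* Worked sketch (illustrative): since cexpi (x) == cos (x) + i*sin (x),
   a call

     sincos (x, &s, &c);

   is rewritten along the lines of

     _Complex double t = __builtin_cexpi (x);
     s = __imag__ t, c = __real__ t;

   letting one cexpi routine (or, at expansion time, cexp) feed both
   results. */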
8685 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8686 Return NULL_TREE if no simplification can be made. */
8688 static tree
8689 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8691 if (!validate_arg (arg1, POINTER_TYPE)
8692 || !validate_arg (arg2, POINTER_TYPE)
8693 || !validate_arg (len, INTEGER_TYPE))
8694 return NULL_TREE;
8696 /* If the LEN parameter is zero, return zero. */
8697 if (integer_zerop (len))
8698 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8699 arg1, arg2);
8701 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8702 if (operand_equal_p (arg1, arg2, 0))
8703 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8705 /* If len parameter is one, return an expression corresponding to
8706 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8707 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8709 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8710 tree cst_uchar_ptr_node
8711 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8713 tree ind1
8714 = fold_convert_loc (loc, integer_type_node,
8715 build1 (INDIRECT_REF, cst_uchar_node,
8716 fold_convert_loc (loc,
8717 cst_uchar_ptr_node,
8718 arg1)));
8719 tree ind2
8720 = fold_convert_loc (loc, integer_type_node,
8721 build1 (INDIRECT_REF, cst_uchar_node,
8722 fold_convert_loc (loc,
8723 cst_uchar_ptr_node,
8724 arg2)));
8725 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8728 return NULL_TREE;
8731 /* Fold a call to builtin isascii with argument ARG. */
8733 static tree
8734 fold_builtin_isascii (location_t loc, tree arg)
8736 if (!validate_arg (arg, INTEGER_TYPE))
8737 return NULL_TREE;
8738 else
8740 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8741 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8742 build_int_cst (integer_type_node,
8743 ~ (unsigned HOST_WIDE_INT) 0x7f));
8744 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8745 arg, integer_zero_node);
8749 /* Fold a call to builtin toascii with argument ARG. */
8751 static tree
8752 fold_builtin_toascii (location_t loc, tree arg)
8754 if (!validate_arg (arg, INTEGER_TYPE))
8755 return NULL_TREE;
8757 /* Transform toascii(c) -> (c & 0x7f). */
8758 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8759 build_int_cst (integer_type_node, 0x7f));
8762 /* Fold a call to builtin isdigit with argument ARG. */
8764 static tree
8765 fold_builtin_isdigit (location_t loc, tree arg)
8767 if (!validate_arg (arg, INTEGER_TYPE))
8768 return NULL_TREE;
8769 else
8771 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8772 /* According to the C standard, isdigit is unaffected by locale.
8773 However, it definitely is affected by the target character set. */
8774 unsigned HOST_WIDE_INT target_digit0
8775 = lang_hooks.to_target_charset ('0');
8777 if (target_digit0 == 0)
8778 return NULL_TREE;
8780 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8781 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8782 build_int_cst (unsigned_type_node, target_digit0));
8783 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8784 build_int_cst (unsigned_type_node, 9));
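/* Worked example (illustrative, assuming ASCII where '0' == 48): the
   unsigned subtraction folds both range checks into one comparison:

     c == '7' (55):   (unsigned) (55 - 48) == 7,  7 <= 9  -> 1
     c == 'a' (97):   (unsigned) (97 - 48) == 49, 49 > 9  -> 0
     c == 0x1f (31):  (unsigned) (31 - 48) wraps to a huge value -> 0

   Values below '0' wrap around and so also fail the <= 9 test. */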
8788 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8790 static tree
8791 fold_builtin_fabs (location_t loc, tree arg, tree type)
8793 if (!validate_arg (arg, REAL_TYPE))
8794 return NULL_TREE;
8796 arg = fold_convert_loc (loc, type, arg);
8797 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8800 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8802 static tree
8803 fold_builtin_abs (location_t loc, tree arg, tree type)
8805 if (!validate_arg (arg, INTEGER_TYPE))
8806 return NULL_TREE;
8808 arg = fold_convert_loc (loc, type, arg);
8809 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8812 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8814 static tree
8815 fold_builtin_carg (location_t loc, tree arg, tree type)
8817 if (validate_arg (arg, COMPLEX_TYPE)
8818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8820 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8822 if (atan2_fn)
8824 tree new_arg = builtin_save_expr (arg);
8825 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8826 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8827 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8831 return NULL_TREE;
8834 /* Fold a call to builtin frexp, we can assume the base is 2. */
8836 static tree
8837 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8839 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8840 return NULL_TREE;
8842 STRIP_NOPS (arg0);
8844 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8845 return NULL_TREE;
8847 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8849 /* Proceed if a valid pointer type was passed in. */
8850 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8852 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8853 tree frac, exp;
8855 switch (value->cl)
8857 case rvc_zero:
8858 /* For +-0, return (*exp = 0, +-0). */
8859 exp = integer_zero_node;
8860 frac = arg0;
8861 break;
8862 case rvc_nan:
8863 case rvc_inf:
8864 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8865 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8866 case rvc_normal:
8868 /* Since the frexp function always expects base 2, and in
8869 GCC normalized significands are already in the range
8870 [0.5, 1.0), we have exactly what frexp wants. */
8871 REAL_VALUE_TYPE frac_rvt = *value;
8872 SET_REAL_EXP (&frac_rvt, 0);
8873 frac = build_real (rettype, frac_rvt);
8874 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8876 break;
8877 default:
8878 gcc_unreachable ();
8881 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8882 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8883 TREE_SIDE_EFFECTS (arg1) = 1;
8884 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8887 return NULL_TREE;
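/* A worked example of the constant fold above: for frexp (6.0, &e),
   6.0 = 0.75 * 2**3 with the significand already in [0.5, 1.0), so
   the result is the compound expression (*e = 3, 0.75). */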
8890 /* Fold a call to builtin modf. */
8892 static tree
8893 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8895 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8896 return NULL_TREE;
8898 STRIP_NOPS (arg0);
8900 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8901 return NULL_TREE;
8903 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8905 /* Proceed if a valid pointer type was passed in. */
8906 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8908 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8909 REAL_VALUE_TYPE trunc, frac;
8911 switch (value->cl)
8913 case rvc_nan:
8914 case rvc_zero:
8915 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8916 trunc = frac = *value;
8917 break;
8918 case rvc_inf:
8919 /* For +-Inf, return (*arg1 = arg0, +-0). */
8920 frac = dconst0;
8921 frac.sign = value->sign;
8922 trunc = *value;
8923 break;
8924 case rvc_normal:
8925 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8926 real_trunc (&trunc, VOIDmode, value);
8927 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8928 /* If the original number was negative and already
8929 integral, then the fractional part is -0.0. */
8930 if (value->sign && frac.cl == rvc_zero)
8931 frac.sign = value->sign;
8932 break;
8935 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8936 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8937 build_real (rettype, trunc));
8938 TREE_SIDE_EFFECTS (arg1) = 1;
8939 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8940 build_real (rettype, frac));
8943 return NULL_TREE;
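/* Worked examples of the constant folds above:
     modf (-2.5, &i) -> (*i = -2.0, -0.5)
     modf (-2.0, &i) -> (*i = -2.0, -0.0)  fraction keeps the sign
     modf (inf, &i)  -> (*i = inf, +0.0)
   matching C99's requirement that both results carry the sign of x. */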
8946 /* Given a location LOC, an interclass builtin function decl FNDECL
8947 and its single argument ARG, return a folded expression computing
8948 the same, or NULL_TREE if we either couldn't or didn't want to fold
8949 (the latter happens if there's an RTL instruction available). */
8951 static tree
8952 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8954 machine_mode mode;
8956 if (!validate_arg (arg, REAL_TYPE))
8957 return NULL_TREE;
8959 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8960 return NULL_TREE;
8962 mode = TYPE_MODE (TREE_TYPE (arg));
8964 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8966 /* If there is no optab, try generic code. */
8967 switch (DECL_FUNCTION_CODE (fndecl))
8969 tree result;
8971 CASE_FLT_FN (BUILT_IN_ISINF):
8973 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8974 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8975 tree type = TREE_TYPE (arg);
8976 REAL_VALUE_TYPE r;
8977 char buf[128];
8979 if (is_ibm_extended)
8981 /* NaN and Inf are encoded in the high-order double value
8982 only. The low-order value is not significant. */
8983 type = double_type_node;
8984 mode = DFmode;
8985 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8987 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8988 real_from_string (&r, buf);
8989 result = build_call_expr (isgr_fn, 2,
8990 fold_build1_loc (loc, ABS_EXPR, type, arg),
8991 build_real (type, r));
8992 return result;
8994 CASE_FLT_FN (BUILT_IN_FINITE):
8995 case BUILT_IN_ISFINITE:
8997 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8998 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8999 tree type = TREE_TYPE (arg);
9000 REAL_VALUE_TYPE r;
9001 char buf[128];
9003 if (is_ibm_extended)
9005 /* NaN and Inf are encoded in the high-order double value
9006 only. The low-order value is not significant. */
9007 type = double_type_node;
9008 mode = DFmode;
9009 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9011 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9012 real_from_string (&r, buf);
9013 result = build_call_expr (isle_fn, 2,
9014 fold_build1_loc (loc, ABS_EXPR, type, arg),
9015 build_real (type, r));
9016 /*result = fold_build2_loc (loc, UNGT_EXPR,
9017 TREE_TYPE (TREE_TYPE (fndecl)),
9018 fold_build1_loc (loc, ABS_EXPR, type, arg),
9019 build_real (type, r));
9020 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9021 TREE_TYPE (TREE_TYPE (fndecl)),
9022 result);*/
9023 return result;
9025 case BUILT_IN_ISNORMAL:
9027 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9028 islessequal(fabs(x),DBL_MAX). */
9029 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9030 tree type = TREE_TYPE (arg);
9031 tree orig_arg, max_exp, min_exp;
9032 machine_mode orig_mode = mode;
9033 REAL_VALUE_TYPE rmax, rmin;
9034 char buf[128];
9036 orig_arg = arg = builtin_save_expr (arg);
9037 if (is_ibm_extended)
9039 /* Use double to test the normal range of IBM extended
9040 precision. Emin for IBM extended precision is
9041 different to emin for IEEE double, being 53 higher
9042 since the low double exponent is at least 53 lower
9043 than the high double exponent. */
9044 type = double_type_node;
9045 mode = DFmode;
9046 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9048 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9050 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9051 real_from_string (&rmax, buf);
9052 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9053 real_from_string (&rmin, buf);
9054 max_exp = build_real (type, rmax);
9055 min_exp = build_real (type, rmin);
9057 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9058 if (is_ibm_extended)
9060 /* Testing the high end of the range is done just using
9061 the high double, using the same test as isfinite().
9062 For the subnormal end of the range we first test the
9063 high double, then if its magnitude is equal to the
9064 limit of 0x1p-969, we test whether the low double is
9065 non-zero and opposite sign to the high double. */
9066 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9067 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9068 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9069 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9070 arg, min_exp);
9071 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9072 complex_double_type_node, orig_arg);
9073 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9074 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9075 tree zero = build_real (type, dconst0);
9076 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9077 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9078 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9079 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9080 fold_build3 (COND_EXPR,
9081 integer_type_node,
9082 hilt, logt, lolt));
9083 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9084 eq_min, ok_lo);
9085 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9086 gt_min, eq_min);
9088 else
9090 tree const isge_fn
9091 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9092 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9094 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9095 max_exp, min_exp);
9096 return result;
9098 default:
9099 break;
9102 return NULL_TREE;
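/* For IEEE double, get_max_float produces DBL_MAX
   (0x1.fffffffffffffp+1023), so e.g. isinf (x) folds to roughly
     __builtin_isgreater (__builtin_fabs (x), DBL_MAX)
   which is false for every finite value and for NaN (isgreater is a
   quiet comparison), and true only for +-Inf. */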
9105 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9106 ARG is the argument for the call. */
9108 static tree
9109 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9111 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9113 if (!validate_arg (arg, REAL_TYPE))
9114 return NULL_TREE;
9116 switch (builtin_index)
9118 case BUILT_IN_ISINF:
9119 if (!HONOR_INFINITIES (arg))
9120 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9122 return NULL_TREE;
9124 case BUILT_IN_ISINF_SIGN:
9126 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9127 /* In a boolean context, GCC will fold the inner COND_EXPR to
9128 1. So e.g. "if (isinf_sign(x))" would be folded to just
9129 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9130 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9131 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9132 tree tmp = NULL_TREE;
9134 arg = builtin_save_expr (arg);
9136 if (signbit_fn && isinf_fn)
9138 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9139 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9141 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9142 signbit_call, integer_zero_node);
9143 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9144 isinf_call, integer_zero_node);
9146 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9147 integer_minus_one_node, integer_one_node);
9148 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9149 isinf_call, tmp,
9150 integer_zero_node);
9153 return tmp;
9156 case BUILT_IN_ISFINITE:
9157 if (!HONOR_NANS (arg)
9158 && !HONOR_INFINITIES (arg))
9159 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9161 return NULL_TREE;
9163 case BUILT_IN_ISNAN:
9164 if (!HONOR_NANS (arg))
9165 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9168 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9169 if (is_ibm_extended)
9171 /* NaN and Inf are encoded in the high-order double value
9172 only. The low-order value is not significant. */
9173 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9176 arg = builtin_save_expr (arg);
9177 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9179 default:
9180 gcc_unreachable ();
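/* In effect isnan (x) becomes the self-comparison UNORDERED_EXPR (x, x),
   true exactly for NaNs, while under -ffinite-math-only (!HONOR_NANS)
   the whole call folds to the constant 0, still evaluating ARG for
   side effects via omit_one_operand_loc. */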
9184 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9185 This builtin will generate code to return the appropriate floating
9186 point classification depending on the value of the floating point
9187 number passed in. The possible return values must be supplied as
9188 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9189 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9190 one floating point argument which is "type generic". */
9192 static tree
9193 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9195 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9196 arg, type, res, tmp;
9197 machine_mode mode;
9198 REAL_VALUE_TYPE r;
9199 char buf[128];
9201 /* Verify the required arguments in the original call. */
9202 if (nargs != 6
9203 || !validate_arg (args[0], INTEGER_TYPE)
9204 || !validate_arg (args[1], INTEGER_TYPE)
9205 || !validate_arg (args[2], INTEGER_TYPE)
9206 || !validate_arg (args[3], INTEGER_TYPE)
9207 || !validate_arg (args[4], INTEGER_TYPE)
9208 || !validate_arg (args[5], REAL_TYPE))
9209 return NULL_TREE;
9211 fp_nan = args[0];
9212 fp_infinite = args[1];
9213 fp_normal = args[2];
9214 fp_subnormal = args[3];
9215 fp_zero = args[4];
9216 arg = args[5];
9217 type = TREE_TYPE (arg);
9218 mode = TYPE_MODE (type);
9219 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9221 /* fpclassify(x) ->
9222 isnan(x) ? FP_NAN :
9223 (fabs(x) == Inf ? FP_INFINITE :
9224 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9225 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9227 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9228 build_real (type, dconst0));
9229 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9230 tmp, fp_zero, fp_subnormal);
9232 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9233 real_from_string (&r, buf);
9234 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9235 arg, build_real (type, r));
9236 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9238 if (HONOR_INFINITIES (mode))
9240 real_inf (&r);
9241 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9242 build_real (type, r));
9243 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9244 fp_infinite, res);
9247 if (HONOR_NANS (mode))
9249 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9250 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9253 return res;
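/* A worked example of the nested folds above, for a subnormal double
   x = 0x1p-1030: fabs (x) is neither NaN nor Inf, it is below the
   normal threshold 0x1p-1022 (DBL_MIN, built from emin - 1 above),
   and it compares unequal to zero, so the chain selects FP_SUBNORMAL. */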
9256 /* Fold a call to an unordered comparison function such as
9257 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9258 being called and ARG0 and ARG1 are the arguments for the call.
9259 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9260 the opposite of the desired result. UNORDERED_CODE is used
9261 for modes that can hold NaNs and ORDERED_CODE is used for
9262 the rest. */
9264 static tree
9265 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9266 enum tree_code unordered_code,
9267 enum tree_code ordered_code)
9269 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9270 enum tree_code code;
9271 tree type0, type1;
9272 enum tree_code code0, code1;
9273 tree cmp_type = NULL_TREE;
9275 type0 = TREE_TYPE (arg0);
9276 type1 = TREE_TYPE (arg1);
9278 code0 = TREE_CODE (type0);
9279 code1 = TREE_CODE (type1);
9281 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9282 /* Choose the wider of two real types. */
9283 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9284 ? type0 : type1;
9285 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9286 cmp_type = type0;
9287 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9288 cmp_type = type1;
9290 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9291 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9293 if (unordered_code == UNORDERED_EXPR)
9295 if (!HONOR_NANS (arg0))
9296 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9297 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9300 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9301 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9302 fold_build2_loc (loc, code, type, arg0, arg1));
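/* For example, isgreater (x, y) arrives here with UNLE_EXPR/LE_EXPR
   and folds to !(x unle y) when NaNs are honored -- a quiet comparison
   raising no invalid-operand exception -- and to plain !(x <= y) under
   -ffinite-math-only. */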
9305 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9306 arithmetics if it can never overflow, or into internal functions that
9307 return both result of arithmetics and overflowed boolean flag in
9308 a complex integer result, or some other check for overflow.
9309 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9310 checking part of that. */
9312 static tree
9313 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9314 tree arg0, tree arg1, tree arg2)
9316 enum internal_fn ifn = IFN_LAST;
9317 /* The code of the expression corresponding to the built-in. */
9318 enum tree_code opcode = ERROR_MARK;
9319 bool ovf_only = false;
9321 switch (fcode)
9323 case BUILT_IN_ADD_OVERFLOW_P:
9324 ovf_only = true;
9325 /* FALLTHRU */
9326 case BUILT_IN_ADD_OVERFLOW:
9327 case BUILT_IN_SADD_OVERFLOW:
9328 case BUILT_IN_SADDL_OVERFLOW:
9329 case BUILT_IN_SADDLL_OVERFLOW:
9330 case BUILT_IN_UADD_OVERFLOW:
9331 case BUILT_IN_UADDL_OVERFLOW:
9332 case BUILT_IN_UADDLL_OVERFLOW:
9333 opcode = PLUS_EXPR;
9334 ifn = IFN_ADD_OVERFLOW;
9335 break;
9336 case BUILT_IN_SUB_OVERFLOW_P:
9337 ovf_only = true;
9338 /* FALLTHRU */
9339 case BUILT_IN_SUB_OVERFLOW:
9340 case BUILT_IN_SSUB_OVERFLOW:
9341 case BUILT_IN_SSUBL_OVERFLOW:
9342 case BUILT_IN_SSUBLL_OVERFLOW:
9343 case BUILT_IN_USUB_OVERFLOW:
9344 case BUILT_IN_USUBL_OVERFLOW:
9345 case BUILT_IN_USUBLL_OVERFLOW:
9346 opcode = MINUS_EXPR;
9347 ifn = IFN_SUB_OVERFLOW;
9348 break;
9349 case BUILT_IN_MUL_OVERFLOW_P:
9350 ovf_only = true;
9351 /* FALLTHRU */
9352 case BUILT_IN_MUL_OVERFLOW:
9353 case BUILT_IN_SMUL_OVERFLOW:
9354 case BUILT_IN_SMULL_OVERFLOW:
9355 case BUILT_IN_SMULLL_OVERFLOW:
9356 case BUILT_IN_UMUL_OVERFLOW:
9357 case BUILT_IN_UMULL_OVERFLOW:
9358 case BUILT_IN_UMULLL_OVERFLOW:
9359 opcode = MULT_EXPR;
9360 ifn = IFN_MUL_OVERFLOW;
9361 break;
9362 default:
9363 gcc_unreachable ();
9366 /* For the "generic" overloads, the first two arguments can have different
9367 types and the last argument determines the target type to use to check
9368 for overflow. The arguments of the other overloads all have the same
9369 type. */
9370 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9372 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9373 arguments are constant, attempt to fold the built-in call into a constant
9374 expression indicating whether or not it detected an overflow. */
9375 if (ovf_only
9376 && TREE_CODE (arg0) == INTEGER_CST
9377 && TREE_CODE (arg1) == INTEGER_CST)
9378 /* Perform the computation in the target type and check for overflow. */
9379 return omit_one_operand_loc (loc, boolean_type_node,
9380 arith_overflowed_p (opcode, type, arg0, arg1)
9381 ? boolean_true_node : boolean_false_node,
9382 arg2);
9384 tree intres, ovfres;
9385 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9387 intres = fold_binary_loc (loc, opcode, type,
9388 fold_convert_loc (loc, type, arg0),
9389 fold_convert_loc (loc, type, arg1));
9390 if (TREE_OVERFLOW (intres))
9391 intres = drop_tree_overflow (intres);
9392 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9393 ? boolean_true_node : boolean_false_node);
9395 else
9397 tree ctype = build_complex_type (type);
9398 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9399 arg0, arg1);
9400 tree tgt = save_expr (call);
9401 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9402 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9403 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9406 if (ovf_only)
9407 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9409 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9410 tree store
9411 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9412 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
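/* Two sketches of the resulting trees, for int a, b, r:
     __builtin_add_overflow_p (INT_MAX, 1, (int) 0) -> true (constant)
     __builtin_add_overflow (a, b, &r) ->
       _tmp = .ADD_OVERFLOW (a, b);      complex int internal call
       (*&r = REALPART_EXPR <_tmp>,      wrapped result
        (_Bool) IMAGPART_EXPR <_tmp>)    overflow flag
   so one internal function feeds both the stored value and the flag. */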
9415 /* Fold a call to __builtin_FILE to a constant string. */
9417 static inline tree
9418 fold_builtin_FILE (location_t loc)
9420 if (const char *fname = LOCATION_FILE (loc))
9422 /* The documentation says this builtin is equivalent to the preprocessor
9423 __FILE__ macro so it appears appropriate to use the same file prefix
9424 mappings. */
9425 fname = remap_macro_filename (fname);
9426 return build_string_literal (strlen (fname) + 1, fname);
9429 return build_string_literal (1, "");
9432 /* Fold a call to __builtin_FUNCTION to a constant string. */
9434 static inline tree
9435 fold_builtin_FUNCTION ()
9437 const char *name = "";
9439 if (current_function_decl)
9440 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9442 return build_string_literal (strlen (name) + 1, name);
9445 /* Fold a call to __builtin_LINE to an integer constant. */
9447 static inline tree
9448 fold_builtin_LINE (location_t loc, tree type)
9450 return build_int_cst (type, LOCATION_LINE (loc));
9453 /* Fold a call to built-in function FNDECL with 0 arguments.
9454 This function returns NULL_TREE if no simplification was possible. */
9456 static tree
9457 fold_builtin_0 (location_t loc, tree fndecl)
9459 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9460 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9461 switch (fcode)
9463 case BUILT_IN_FILE:
9464 return fold_builtin_FILE (loc);
9466 case BUILT_IN_FUNCTION:
9467 return fold_builtin_FUNCTION ();
9469 case BUILT_IN_LINE:
9470 return fold_builtin_LINE (loc, type);
9472 CASE_FLT_FN (BUILT_IN_INF):
9473 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9474 case BUILT_IN_INFD32:
9475 case BUILT_IN_INFD64:
9476 case BUILT_IN_INFD128:
9477 return fold_builtin_inf (loc, type, true);
9479 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9480 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9481 return fold_builtin_inf (loc, type, false);
9483 case BUILT_IN_CLASSIFY_TYPE:
9484 return fold_builtin_classify_type (NULL_TREE);
9486 default:
9487 break;
9489 return NULL_TREE;
9492 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9493 This function returns NULL_TREE if no simplification was possible. */
9495 static tree
9496 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9498 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9499 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9501 if (TREE_CODE (arg0) == ERROR_MARK)
9502 return NULL_TREE;
9504 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9505 return ret;
9507 switch (fcode)
9509 case BUILT_IN_CONSTANT_P:
9511 tree val = fold_builtin_constant_p (arg0);
9513 /* Gimplification will pull the CALL_EXPR for the builtin out of
9514 an if condition. When not optimizing, we'll not CSE it back.
9515 To avoid regressions such as link errors, return false now. */
9516 if (!val && !optimize)
9517 val = integer_zero_node;
9519 return val;
9522 case BUILT_IN_CLASSIFY_TYPE:
9523 return fold_builtin_classify_type (arg0);
9525 case BUILT_IN_STRLEN:
9526 return fold_builtin_strlen (loc, type, arg0);
9528 CASE_FLT_FN (BUILT_IN_FABS):
9529 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9530 case BUILT_IN_FABSD32:
9531 case BUILT_IN_FABSD64:
9532 case BUILT_IN_FABSD128:
9533 return fold_builtin_fabs (loc, arg0, type);
9535 case BUILT_IN_ABS:
9536 case BUILT_IN_LABS:
9537 case BUILT_IN_LLABS:
9538 case BUILT_IN_IMAXABS:
9539 return fold_builtin_abs (loc, arg0, type);
9541 CASE_FLT_FN (BUILT_IN_CONJ):
9542 if (validate_arg (arg0, COMPLEX_TYPE)
9543 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9544 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9545 break;
9547 CASE_FLT_FN (BUILT_IN_CREAL):
9548 if (validate_arg (arg0, COMPLEX_TYPE)
9549 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9550 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9551 break;
9553 CASE_FLT_FN (BUILT_IN_CIMAG):
9554 if (validate_arg (arg0, COMPLEX_TYPE)
9555 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9556 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9557 break;
9559 CASE_FLT_FN (BUILT_IN_CARG):
9560 return fold_builtin_carg (loc, arg0, type);
9562 case BUILT_IN_ISASCII:
9563 return fold_builtin_isascii (loc, arg0);
9565 case BUILT_IN_TOASCII:
9566 return fold_builtin_toascii (loc, arg0);
9568 case BUILT_IN_ISDIGIT:
9569 return fold_builtin_isdigit (loc, arg0);
9571 CASE_FLT_FN (BUILT_IN_FINITE):
9572 case BUILT_IN_FINITED32:
9573 case BUILT_IN_FINITED64:
9574 case BUILT_IN_FINITED128:
9575 case BUILT_IN_ISFINITE:
9577 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9578 if (ret)
9579 return ret;
9580 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9583 CASE_FLT_FN (BUILT_IN_ISINF):
9584 case BUILT_IN_ISINFD32:
9585 case BUILT_IN_ISINFD64:
9586 case BUILT_IN_ISINFD128:
9588 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9589 if (ret)
9590 return ret;
9591 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9594 case BUILT_IN_ISNORMAL:
9595 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9597 case BUILT_IN_ISINF_SIGN:
9598 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9600 CASE_FLT_FN (BUILT_IN_ISNAN):
9601 case BUILT_IN_ISNAND32:
9602 case BUILT_IN_ISNAND64:
9603 case BUILT_IN_ISNAND128:
9604 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9606 case BUILT_IN_FREE:
9607 if (integer_zerop (arg0))
9608 return build_empty_stmt (loc);
9609 break;
9611 default:
9612 break;
9615 return NULL_TREE;
9619 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9620 This function returns NULL_TREE if no simplification was possible. */
9622 static tree
9623 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9625 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9626 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9628 if (TREE_CODE (arg0) == ERROR_MARK
9629 || TREE_CODE (arg1) == ERROR_MARK)
9630 return NULL_TREE;
9632 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9633 return ret;
9635 switch (fcode)
9637 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9638 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9639 if (validate_arg (arg0, REAL_TYPE)
9640 && validate_arg (arg1, POINTER_TYPE))
9641 return do_mpfr_lgamma_r (arg0, arg1, type);
9642 break;
9644 CASE_FLT_FN (BUILT_IN_FREXP):
9645 return fold_builtin_frexp (loc, arg0, arg1, type);
9647 CASE_FLT_FN (BUILT_IN_MODF):
9648 return fold_builtin_modf (loc, arg0, arg1, type);
9650 case BUILT_IN_STRSPN:
9651 return fold_builtin_strspn (loc, arg0, arg1);
9653 case BUILT_IN_STRCSPN:
9654 return fold_builtin_strcspn (loc, arg0, arg1);
9656 case BUILT_IN_STRPBRK:
9657 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9659 case BUILT_IN_EXPECT:
9660 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9662 case BUILT_IN_ISGREATER:
9663 return fold_builtin_unordered_cmp (loc, fndecl,
9664 arg0, arg1, UNLE_EXPR, LE_EXPR);
9665 case BUILT_IN_ISGREATEREQUAL:
9666 return fold_builtin_unordered_cmp (loc, fndecl,
9667 arg0, arg1, UNLT_EXPR, LT_EXPR);
9668 case BUILT_IN_ISLESS:
9669 return fold_builtin_unordered_cmp (loc, fndecl,
9670 arg0, arg1, UNGE_EXPR, GE_EXPR);
9671 case BUILT_IN_ISLESSEQUAL:
9672 return fold_builtin_unordered_cmp (loc, fndecl,
9673 arg0, arg1, UNGT_EXPR, GT_EXPR);
9674 case BUILT_IN_ISLESSGREATER:
9675 return fold_builtin_unordered_cmp (loc, fndecl,
9676 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9677 case BUILT_IN_ISUNORDERED:
9678 return fold_builtin_unordered_cmp (loc, fndecl,
9679 arg0, arg1, UNORDERED_EXPR,
9680 NOP_EXPR);
9682 /* We do the folding for va_start in the expander. */
9683 case BUILT_IN_VA_START:
9684 break;
9686 case BUILT_IN_OBJECT_SIZE:
9687 return fold_builtin_object_size (arg0, arg1);
9689 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9690 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9692 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9693 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9695 default:
9696 break;
9698 return NULL_TREE;
9701 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9702 and ARG2.
9703 This function returns NULL_TREE if no simplification was possible. */
9705 static tree
9706 fold_builtin_3 (location_t loc, tree fndecl,
9707 tree arg0, tree arg1, tree arg2)
9709 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9710 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9712 if (TREE_CODE (arg0) == ERROR_MARK
9713 || TREE_CODE (arg1) == ERROR_MARK
9714 || TREE_CODE (arg2) == ERROR_MARK)
9715 return NULL_TREE;
9717 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9718 arg0, arg1, arg2))
9719 return ret;
9721 switch (fcode)
9724 CASE_FLT_FN (BUILT_IN_SINCOS):
9725 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9727 CASE_FLT_FN (BUILT_IN_REMQUO):
9728 if (validate_arg (arg0, REAL_TYPE)
9729 && validate_arg (arg1, REAL_TYPE)
9730 && validate_arg (arg2, POINTER_TYPE))
9731 return do_mpfr_remquo (arg0, arg1, arg2);
9732 break;
9734 case BUILT_IN_MEMCMP:
9735 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9737 case BUILT_IN_EXPECT:
9738 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9740 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9741 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9743 case BUILT_IN_ADD_OVERFLOW:
9744 case BUILT_IN_SUB_OVERFLOW:
9745 case BUILT_IN_MUL_OVERFLOW:
9746 case BUILT_IN_ADD_OVERFLOW_P:
9747 case BUILT_IN_SUB_OVERFLOW_P:
9748 case BUILT_IN_MUL_OVERFLOW_P:
9749 case BUILT_IN_SADD_OVERFLOW:
9750 case BUILT_IN_SADDL_OVERFLOW:
9751 case BUILT_IN_SADDLL_OVERFLOW:
9752 case BUILT_IN_SSUB_OVERFLOW:
9753 case BUILT_IN_SSUBL_OVERFLOW:
9754 case BUILT_IN_SSUBLL_OVERFLOW:
9755 case BUILT_IN_SMUL_OVERFLOW:
9756 case BUILT_IN_SMULL_OVERFLOW:
9757 case BUILT_IN_SMULLL_OVERFLOW:
9758 case BUILT_IN_UADD_OVERFLOW:
9759 case BUILT_IN_UADDL_OVERFLOW:
9760 case BUILT_IN_UADDLL_OVERFLOW:
9761 case BUILT_IN_USUB_OVERFLOW:
9762 case BUILT_IN_USUBL_OVERFLOW:
9763 case BUILT_IN_USUBLL_OVERFLOW:
9764 case BUILT_IN_UMUL_OVERFLOW:
9765 case BUILT_IN_UMULL_OVERFLOW:
9766 case BUILT_IN_UMULLL_OVERFLOW:
9767 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9769 default:
9770 break;
9772 return NULL_TREE;
9775 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9776 arguments. The unnamed boolean parameter (historically IGNORE, true
9777 if the result of the call is ignored) is currently unused. This
9778 function returns NULL_TREE if no simplification was possible. */
9780 tree
9781 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9783 tree ret = NULL_TREE;
9785 switch (nargs)
9787 case 0:
9788 ret = fold_builtin_0 (loc, fndecl);
9789 break;
9790 case 1:
9791 ret = fold_builtin_1 (loc, fndecl, args[0]);
9792 break;
9793 case 2:
9794 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9795 break;
9796 case 3:
9797 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9798 break;
9799 default:
9800 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9801 break;
9803 if (ret)
9805 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9806 SET_EXPR_LOCATION (ret, loc);
9807 return ret;
9809 return NULL_TREE;
9812 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9813 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9814 of arguments in ARGS to be omitted. OLDNARGS is the number of
9815 elements in ARGS. */
9817 static tree
9818 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9819 int skip, tree fndecl, int n, va_list newargs)
9821 int nargs = oldnargs - skip + n;
9822 tree *buffer;
9824 if (n > 0)
9826 int i, j;
9828 buffer = XALLOCAVEC (tree, nargs);
9829 for (i = 0; i < n; i++)
9830 buffer[i] = va_arg (newargs, tree);
9831 for (j = skip; j < oldnargs; j++, i++)
9832 buffer[i] = args[j];
9834 else
9835 buffer = args + skip;
9837 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9840 /* Return true if FNDECL shouldn't be folded right now.
9841 If a built-in function has an always_inline wrapper, defer
9842 folding it until after always_inline functions have been
9843 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9844 might not be performed. */
9846 bool
9847 avoid_folding_inline_builtin (tree fndecl)
9849 return (DECL_DECLARED_INLINE_P (fndecl)
9850 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9851 && cfun
9852 && !cfun->always_inline_functions_inlined
9853 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
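/* E.g. with -D_FORTIFY_SOURCE=2, glibc redeclares memcpy as an extern
   always_inline wrapper whose body calls __builtin___memcpy_chk; if the
   memcpy call were folded before that wrapper is inlined, the
   object-size check would silently disappear. */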
9856 /* A wrapper function for builtin folding that prevents warnings for
9857 "statement without effect" and the like, caused by removing the
9858 call node before the warning is generated. */
9860 tree
9861 fold_call_expr (location_t loc, tree exp, bool ignore)
9863 tree ret = NULL_TREE;
9864 tree fndecl = get_callee_fndecl (exp);
9865 if (fndecl && fndecl_built_in_p (fndecl)
9866 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9867 yet. Defer folding until we see all the arguments
9868 (after inlining). */
9869 && !CALL_EXPR_VA_ARG_PACK (exp))
9871 int nargs = call_expr_nargs (exp);
9873 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9874 instead the last argument is __builtin_va_arg_pack (). Defer folding
9875 even in that case, until arguments are finalized. */
9876 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9878 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9879 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9880 return NULL_TREE;
9883 if (avoid_folding_inline_builtin (fndecl))
9884 return NULL_TREE;
9886 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9887 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9888 CALL_EXPR_ARGP (exp), ignore);
9889 else
9891 tree *args = CALL_EXPR_ARGP (exp);
9892 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9893 if (ret)
9894 return ret;
9897 return NULL_TREE;
9900 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9901 N arguments are passed in the array ARGARRAY. Return a folded
9902 expression or NULL_TREE if no simplification was possible. */
9904 tree
9905 fold_builtin_call_array (location_t loc, tree,
9906 tree fn,
9907 int n,
9908 tree *argarray)
9910 if (TREE_CODE (fn) != ADDR_EXPR)
9911 return NULL_TREE;
9913 tree fndecl = TREE_OPERAND (fn, 0);
9914 if (TREE_CODE (fndecl) == FUNCTION_DECL
9915 && fndecl_built_in_p (fndecl))
9917 /* If the last argument is __builtin_va_arg_pack (), arguments to this
9918 function are not finalized yet. Defer folding until they are. */
9919 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9921 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9922 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9923 return NULL_TREE;
9925 if (avoid_folding_inline_builtin (fndecl))
9926 return NULL_TREE;
9927 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9928 return targetm.fold_builtin (fndecl, n, argarray, false);
9929 else
9930 return fold_builtin_n (loc, fndecl, argarray, n, false);
9933 return NULL_TREE;
9936 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9937 along with N new arguments specified as the "..." parameters. SKIP
9938 is the number of arguments in EXP to be omitted. This function is used
9939 to do varargs-to-varargs transformations. */
9941 static tree
9942 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9944 va_list ap;
9945 tree t;
9947 va_start (ap, n);
9948 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9949 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9950 va_end (ap);
9952 return t;
9955 /* Validate a single argument ARG against a tree code CODE representing
9956 a type. Return true when the argument is valid. */
9958 static bool
9959 validate_arg (const_tree arg, enum tree_code code)
9961 if (!arg)
9962 return false;
9963 else if (code == POINTER_TYPE)
9964 return POINTER_TYPE_P (TREE_TYPE (arg));
9965 else if (code == INTEGER_TYPE)
9966 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9967 return code == TREE_CODE (TREE_TYPE (arg));
9970 /* This function validates the types of a function call argument list
9971 against a specified list of tree_codes. If the last specifier is a 0,
9972 that represents an ellipsis, otherwise the last specifier must be a
9973 VOID_TYPE.
9975 This is the GIMPLE version of validate_arglist. Eventually we want to
9976 completely convert builtins.c to work from GIMPLEs and the tree based
9977 validate_arglist will then be removed. */
9979 bool
9980 validate_gimple_arglist (const gcall *call, ...)
9982 enum tree_code code;
9983 bool res = false;
9984 va_list ap;
9985 const_tree arg;
9986 size_t i;
9988 va_start (ap, call);
9989 i = 0;
9991 do
9993 code = (enum tree_code) va_arg (ap, int);
9994 switch (code)
9996 case 0:
9997 /* This signifies an ellipsis; any further arguments are all ok. */
9998 res = true;
9999 goto end;
10000 case VOID_TYPE:
10001 /* This signifies an endlink: if no arguments remain, return
10002 true, otherwise return false. */
10003 res = (i == gimple_call_num_args (call));
10004 goto end;
10005 default:
10006 /* If no parameters remain or the parameter's code does not
10007 match the specified code, return false. Otherwise continue
10008 checking any remaining arguments. */
10009 arg = gimple_call_arg (call, i++);
10010 if (!validate_arg (arg, code))
10011 goto end;
10012 break;
10015 while (1);
10017 /* We need gotos here since we can only have one va_end in a
10018 function. */
10019 end: ;
10020 va_end (ap);
10022 return res;
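/* Typical use: validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly one floating-point argument followed by
   one pointer; writing 0 in place of VOID_TYPE would instead allow any
   number of trailing arguments. */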
10025 /* Default target-specific builtin expander that does nothing. */
10027 rtx
10028 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10029 rtx target ATTRIBUTE_UNUSED,
10030 rtx subtarget ATTRIBUTE_UNUSED,
10031 machine_mode mode ATTRIBUTE_UNUSED,
10032 int ignore ATTRIBUTE_UNUSED)
10034 return NULL_RTX;
10037 /* Returns true if EXP represents data that would potentially reside
10038 in a readonly section. */
10040 bool
10041 readonly_data_expr (tree exp)
10043 STRIP_NOPS (exp);
10045 if (TREE_CODE (exp) != ADDR_EXPR)
10046 return false;
10048 exp = get_base_address (TREE_OPERAND (exp, 0));
10049 if (!exp)
10050 return false;
10052 /* Make sure we call decl_readonly_section only for trees it
10053 can handle (since it returns true for everything it doesn't
10054 understand). */
10055 if (TREE_CODE (exp) == STRING_CST
10056 || TREE_CODE (exp) == CONSTRUCTOR
10057 || (VAR_P (exp) && TREE_STATIC (exp)))
10058 return decl_readonly_section (exp, 0);
10059 else
10060 return false;
10063 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10064 to the call, and TYPE is its return type.
10066 Return NULL_TREE if no simplification was possible, otherwise return the
10067 simplified form of the call as a tree.
10069 The simplified form may be a constant or other expression which
10070 computes the same value, but in a more efficient manner (including
10071 calls to other builtin functions).
10073 The call may contain arguments which need to be evaluated, but
10074 which are not useful to determine the result of the call. In
10075 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10076 COMPOUND_EXPR will be an argument which must be evaluated.
10077 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10078 COMPOUND_EXPR in the chain will contain the tree for the simplified
10079 form of the builtin function call. */
10081 static tree
10082 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10084 if (!validate_arg (s1, POINTER_TYPE)
10085 || !validate_arg (s2, POINTER_TYPE))
10086 return NULL_TREE;
10087 else
10089 tree fn;
10090 const char *p1, *p2;
10092 p2 = c_getstr (s2);
10093 if (p2 == NULL)
10094 return NULL_TREE;
10096 p1 = c_getstr (s1);
10097 if (p1 != NULL)
10099 const char *r = strpbrk (p1, p2);
10100 tree tem;
10102 if (r == NULL)
10103 return build_int_cst (TREE_TYPE (s1), 0);
10105 /* Return an offset into the constant string argument. */
10106 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10107 return fold_convert_loc (loc, type, tem);
10110 if (p2[0] == '\0')
10111 /* strpbrk(x, "") == NULL.
10112 Evaluate and ignore s1 in case it had side-effects. */
10113 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10115 if (p2[1] != '\0')
10116 return NULL_TREE; /* Really call strpbrk. */
10118 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10119 if (!fn)
10120 return NULL_TREE;
10122 /* New argument list transforming strpbrk(s1, s2) to
10123 strchr(s1, s2[0]). */
10124 return build_call_expr_loc (loc, fn, 2, s1,
10125 build_int_cst (integer_type_node, p2[0]));
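/* Examples of the three outcomes above:
     strpbrk ("hello", "lo") -> "hello" + 2     constant offset
     strpbrk (s, "")         -> (s evaluated, NULL)
     strpbrk (s, "/")        -> strchr (s, '/')
   anything else remains a real strpbrk call. */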
10129 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10130 to the call.
10132 Return NULL_TREE if no simplification was possible, otherwise return the
10133 simplified form of the call as a tree.
10135 The simplified form may be a constant or other expression which
10136 computes the same value, but in a more efficient manner (including
10137 calls to other builtin functions).
10139 The call may contain arguments which need to be evaluated, but
10140 which are not useful to determine the result of the call. In
10141 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10142 COMPOUND_EXPR will be an argument which must be evaluated.
10143 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10144 COMPOUND_EXPR in the chain will contain the tree for the simplified
10145 form of the builtin function call. */
10147 static tree
10148 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10150 if (!validate_arg (s1, POINTER_TYPE)
10151 || !validate_arg (s2, POINTER_TYPE))
10152 return NULL_TREE;
10153 else
10155 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10157 /* If either argument is "", return NULL_TREE. */
10158 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10159 /* Evaluate and ignore both arguments in case either one has
10160 side-effects. */
10161 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10162 s1, s2);
10163 return NULL_TREE;
10167 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10168 to the call.
10170 Return NULL_TREE if no simplification was possible, otherwise return the
10171 simplified form of the call as a tree.
10173 The simplified form may be a constant or other expression which
10174 computes the same value, but in a more efficient manner (including
10175 calls to other builtin functions).
10177 The call may contain arguments which need to be evaluated, but
10178 which are not useful to determine the result of the call. In
10179 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10180 COMPOUND_EXPR will be an argument which must be evaluated.
10181 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10182 COMPOUND_EXPR in the chain will contain the tree for the simplified
10183 form of the builtin function call. */
10185 static tree
10186 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10188 if (!validate_arg (s1, POINTER_TYPE)
10189 || !validate_arg (s2, POINTER_TYPE))
10190 return NULL_TREE;
10191 else
10193 /* If the first argument is "", return NULL_TREE. */
10194 const char *p1 = c_getstr (s1);
10195 if (p1 && *p1 == '\0')
10197 /* Evaluate and ignore argument s2 in case it has
10198 side-effects. */
10199 return omit_one_operand_loc (loc, size_type_node,
10200 size_zero_node, s2);
10203 /* If the second argument is "", return __builtin_strlen(s1). */
10204 const char *p2 = c_getstr (s2);
10205 if (p2 && *p2 == '\0')
10207 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10209 /* If the replacement _DECL isn't initialized, don't do the
10210 transformation. */
10211 if (!fn)
10212 return NULL_TREE;
10214 return build_call_expr_loc (loc, fn, 1, s1);
10216 return NULL_TREE;
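/* Examples: strcspn ("", s2) -> 0 (still evaluating s2 for side
   effects), and strcspn (s1, "") -> strlen (s1), since no character of
   s1 can occur in an empty reject set. */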
10220 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10221 produced. False otherwise. This is done so that we don't output the error
10222 or warning twice or three times. */
10224 bool
10225 fold_builtin_next_arg (tree exp, bool va_start_p)
10227 tree fntype = TREE_TYPE (current_function_decl);
10228 int nargs = call_expr_nargs (exp);
10229 tree arg;
10230 /* There is a good chance the current input_location points inside the
10231 definition of the va_start macro (perhaps on the token for the
10232 builtin) in a system header, so warnings will not be emitted.
10233 Use the location in real source code. */
10234 location_t current_location =
10235 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10236 NULL);
10238 if (!stdarg_p (fntype))
10240 error ("%<va_start%> used in function with fixed arguments");
10241 return true;
10244 if (va_start_p)
10246 if (va_start_p && (nargs != 2))
10248 error ("wrong number of arguments to function %<va_start%>");
10249 return true;
10251 arg = CALL_EXPR_ARG (exp, 1);
10253 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10254 once we have checked the arguments and, if needed, issued a warning. */
10255 else
10257 if (nargs == 0)
10259 /* Evidently an out of date version of <stdarg.h>; can't validate
10260 va_start's second argument, but can still work as intended. */
10261 warning_at (current_location,
10262 OPT_Wvarargs,
10263 "%<__builtin_next_arg%> called without an argument");
10264 return true;
10266 else if (nargs > 1)
10268 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10269 return true;
10271 arg = CALL_EXPR_ARG (exp, 0);
10274 if (TREE_CODE (arg) == SSA_NAME)
10275 arg = SSA_NAME_VAR (arg);
10277 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10278 or __builtin_next_arg (0) the first time we see it, after checking
10279 the arguments and if needed issuing a warning. */
10280 if (!integer_zerop (arg))
10282 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10284 /* Strip off all nops for the sake of the comparison. This
10285 is not quite the same as STRIP_NOPS. It does more.
10286 We must also strip off INDIRECT_EXPR for C++ reference
10287 parameters. */
10288 while (CONVERT_EXPR_P (arg)
10289 || TREE_CODE (arg) == INDIRECT_REF)
10290 arg = TREE_OPERAND (arg, 0);
10291 if (arg != last_parm)
10293 /* FIXME: Sometimes with the tree optimizers we can get something
10294 other than the last argument even though the user used the last
10295 argument. We just warn and set the arg to be the last
10296 argument, accepting that we may generate wrong code because of
10297 it. */
10298 warning_at (current_location,
10299 OPT_Wvarargs,
10300 "second parameter of %<va_start%> not last named argument");
10303 /* Undefined by C99 7.15.1.4p4 (va_start):
10304 "If the parameter parmN is declared with the register storage
10305 class, with a function or array type, or with a type that is
10306 not compatible with the type that results after application of
10307 the default argument promotions, the behavior is undefined." */
10309 else if (DECL_REGISTER (arg))
10311 warning_at (current_location,
10312 OPT_Wvarargs,
10313 "undefined behavior when second parameter of "
10314 "%<va_start%> is declared with %<register%> storage");
10317 /* We want to verify the second parameter just once before the tree
10318 optimizers are run and then avoid keeping it in the tree,
10319 as otherwise we could warn even for correct code like:
10320 void foo (int i, ...)
10321 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10322 if (va_start_p)
10323 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10324 else
10325 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10327 return false;
10331 /* Expand a call EXP to __builtin_object_size. */
10333 static rtx
10334 expand_builtin_object_size (tree exp)
10336 tree ost;
10337 int object_size_type;
10338 tree fndecl = get_callee_fndecl (exp);
10340 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10342 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10343 exp, fndecl);
10344 expand_builtin_trap ();
10345 return const0_rtx;
10348 ost = CALL_EXPR_ARG (exp, 1);
10349 STRIP_NOPS (ost);
10351 if (TREE_CODE (ost) != INTEGER_CST
10352 || tree_int_cst_sgn (ost) < 0
10353 || compare_tree_int (ost, 3) > 0)
10355 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10356 exp, fndecl);
10357 expand_builtin_trap ();
10358 return const0_rtx;
10361 object_size_type = tree_to_shwi (ost);
10363 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10366 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10367 FCODE is the BUILT_IN_* to use.
10368 Return NULL_RTX if we failed; the caller should emit a normal call,
10369 otherwise try to get the result in TARGET, if convenient (and in
10370 mode MODE if that's convenient). */
10372 static rtx
10373 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10374 enum built_in_function fcode)
10376 if (!validate_arglist (exp,
10377 POINTER_TYPE,
10378 fcode == BUILT_IN_MEMSET_CHK
10379 ? INTEGER_TYPE : POINTER_TYPE,
10380 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10381 return NULL_RTX;
10383 tree dest = CALL_EXPR_ARG (exp, 0);
10384 tree src = CALL_EXPR_ARG (exp, 1);
10385 tree len = CALL_EXPR_ARG (exp, 2);
10386 tree size = CALL_EXPR_ARG (exp, 3);
10388 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10389 /*str=*/NULL_TREE, size);
10391 if (!tree_fits_uhwi_p (size))
10392 return NULL_RTX;
10394 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10396 /* Avoid transforming the checking call to an ordinary one when
10397 an overflow has been detected or when the call couldn't be
10398 validated because the size is not constant. */
10399 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10400 return NULL_RTX;
10402 tree fn = NULL_TREE;
10403 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10404 mem{cpy,pcpy,move,set} is available. */
10405 switch (fcode)
10407 case BUILT_IN_MEMCPY_CHK:
10408 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10409 break;
10410 case BUILT_IN_MEMPCPY_CHK:
10411 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10412 break;
10413 case BUILT_IN_MEMMOVE_CHK:
10414 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10415 break;
10416 case BUILT_IN_MEMSET_CHK:
10417 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10418 break;
10419 default:
10420 break;
10423 if (! fn)
10424 return NULL_RTX;
10426 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10427 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10428 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10429 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10431 else if (fcode == BUILT_IN_MEMSET_CHK)
10432 return NULL_RTX;
10433 else
10435 unsigned int dest_align = get_pointer_alignment (dest);
10437 /* If DEST is not a pointer type, call the normal function. */
10438 if (dest_align == 0)
10439 return NULL_RTX;
10441 /* If SRC and DEST are the same (and not volatile), do nothing. */
10442 if (operand_equal_p (src, dest, 0))
10444 tree expr;
10446 if (fcode != BUILT_IN_MEMPCPY_CHK)
10448 /* Evaluate and ignore LEN in case it has side-effects. */
10449 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10450 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10453 expr = fold_build_pointer_plus (dest, len);
10454 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10457 /* __memmove_chk special case. */
10458 if (fcode == BUILT_IN_MEMMOVE_CHK)
10460 unsigned int src_align = get_pointer_alignment (src);
10462 if (src_align == 0)
10463 return NULL_RTX;
10465 /* If src is categorized for a readonly section we can use
10466 normal __memcpy_chk. */
10467 if (readonly_data_expr (src))
10469 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10470 if (!fn)
10471 return NULL_RTX;
10472 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10473 dest, src, len, size);
10474 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10475 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10476 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10479 return NULL_RTX;
10483 /* Emit warning if a buffer overflow is detected at compile time. */
10485 static void
10486 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10488 /* The source string. */
10489 tree srcstr = NULL_TREE;
10490 /* The size of the destination object. */
10491 tree objsize = NULL_TREE;
10492 /* The string that is being appended to (as in __strcat_chk),
10493 or null if the call isn't a concatenation. */
10494 tree catstr = NULL_TREE;
10495 /* The maximum length of the source sequence in a bounded operation
10496 (such as __strncat_chk) or null if the operation isn't bounded
10497 (such as __strcat_chk). */
10498 tree maxread = NULL_TREE;
10499 /* The exact size of the access (such as in __strncpy_chk). */
10500 tree size = NULL_TREE;
10502 switch (fcode)
10504 case BUILT_IN_STRCPY_CHK:
10505 case BUILT_IN_STPCPY_CHK:
10506 srcstr = CALL_EXPR_ARG (exp, 1);
10507 objsize = CALL_EXPR_ARG (exp, 2);
10508 break;
10510 case BUILT_IN_STRCAT_CHK:
10511 /* For __strcat_chk the warning will be emitted only if overflowing
10512 by at least strlen (dest) + 1 bytes. */
10513 catstr = CALL_EXPR_ARG (exp, 0);
10514 srcstr = CALL_EXPR_ARG (exp, 1);
10515 objsize = CALL_EXPR_ARG (exp, 2);
10516 break;
10518 case BUILT_IN_STRNCAT_CHK:
10519 catstr = CALL_EXPR_ARG (exp, 0);
10520 srcstr = CALL_EXPR_ARG (exp, 1);
10521 maxread = CALL_EXPR_ARG (exp, 2);
10522 objsize = CALL_EXPR_ARG (exp, 3);
10523 break;
10525 case BUILT_IN_STRNCPY_CHK:
10526 case BUILT_IN_STPNCPY_CHK:
10527 srcstr = CALL_EXPR_ARG (exp, 1);
10528 size = CALL_EXPR_ARG (exp, 2);
10529 objsize = CALL_EXPR_ARG (exp, 3);
10530 break;
10532 case BUILT_IN_SNPRINTF_CHK:
10533 case BUILT_IN_VSNPRINTF_CHK:
10534 maxread = CALL_EXPR_ARG (exp, 1);
10535 objsize = CALL_EXPR_ARG (exp, 3);
10536 break;
10537 default:
10538 gcc_unreachable ();
10541 if (catstr && maxread)
10543 /* Check __strncat_chk. There is no way to determine the length
10544 of the string to which the source string is being appended so
10545 just warn when the length of the source string is not known. */
10546 check_strncat_sizes (exp, objsize);
10547 return;
10550 /* The destination argument is the first one for all built-ins above. */
10551 tree dst = CALL_EXPR_ARG (exp, 0);
10553 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10556 /* Emit warning if a buffer overflow is detected at compile time
10557 in __sprintf_chk/__vsprintf_chk calls. */
10559 static void
10560 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10562 tree size, len, fmt;
10563 const char *fmt_str;
10564 int nargs = call_expr_nargs (exp);
10566 /* Verify the required arguments in the original call. */
10568 if (nargs < 4)
10569 return;
10570 size = CALL_EXPR_ARG (exp, 2);
10571 fmt = CALL_EXPR_ARG (exp, 3);
10573 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10574 return;
10576 /* Check whether the format is a literal string constant. */
10577 fmt_str = c_getstr (fmt);
10578 if (fmt_str == NULL)
10579 return;
10581 if (!init_target_chars ())
10582 return;
10584 /* If the format doesn't contain % args or %%, we know its size. */
10585 if (strchr (fmt_str, target_percent) == 0)
10586 len = build_int_cstu (size_type_node, strlen (fmt_str));
10587 /* If the format is "%s" and the first ... argument is a string literal,
10588 we know it too. */
10589 else if (fcode == BUILT_IN_SPRINTF_CHK
10590 && strcmp (fmt_str, target_percent_s) == 0)
10592 tree arg;
10594 if (nargs < 5)
10595 return;
10596 arg = CALL_EXPR_ARG (exp, 4);
10597 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10598 return;
10600 len = c_strlen (arg, 1);
10601 if (!len || ! tree_fits_uhwi_p (len))
10602 return;
10604 else
10605 return;
10607 /* Add one for the terminating nul. */
10608 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10610 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10611 /*maxread=*/NULL_TREE, len, size);
10614 /* Emit a warning if free is called with the address of a variable. */
10616 static void
10617 maybe_emit_free_warning (tree exp)
10619 if (call_expr_nargs (exp) != 1)
10620 return;
10622 tree arg = CALL_EXPR_ARG (exp, 0);
10624 STRIP_NOPS (arg);
10625 if (TREE_CODE (arg) != ADDR_EXPR)
10626 return;
10628 arg = get_base_address (TREE_OPERAND (arg, 0));
10629 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10630 return;
10632 if (SSA_VAR_P (arg))
10633 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10634 "%Kattempt to free a non-heap object %qD", exp, arg);
10635 else
10636 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10637 "%Kattempt to free a non-heap object", exp);
10640 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10641 if possible. */
10643 static tree
10644 fold_builtin_object_size (tree ptr, tree ost)
10646 unsigned HOST_WIDE_INT bytes;
10647 int object_size_type;
10649 if (!validate_arg (ptr, POINTER_TYPE)
10650 || !validate_arg (ost, INTEGER_TYPE))
10651 return NULL_TREE;
10653 STRIP_NOPS (ost);
10655 if (TREE_CODE (ost) != INTEGER_CST
10656 || tree_int_cst_sgn (ost) < 0
10657 || compare_tree_int (ost, 3) > 0)
10658 return NULL_TREE;
10660 object_size_type = tree_to_shwi (ost);
10662 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10663 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10664 and (size_t) 0 for types 2 and 3. */
10665 if (TREE_SIDE_EFFECTS (ptr))
10666 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10668 if (TREE_CODE (ptr) == ADDR_EXPR)
10670 compute_builtin_object_size (ptr, object_size_type, &bytes);
10671 if (wi::fits_to_tree_p (bytes, size_type_node))
10672 return build_int_cstu (size_type_node, bytes);
10674 else if (TREE_CODE (ptr) == SSA_NAME)
10676 /* If the object size is not known yet, delay folding until
10677 later. Maybe subsequent passes will help determine
10678 it. */
10679 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10680 && wi::fits_to_tree_p (bytes, size_type_node))
10681 return build_int_cstu (size_type_node, bytes);
10684 return NULL_TREE;
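/* For instance, given "char buf[64];", __builtin_object_size (&buf[16], 0)
   folds to 48 here, while for a pointer of unknown provenance the call
   stays unfolded and is eventually expanded to the "unknown" value:
   (size_t) -1 for types 0 and 1, or 0 for types 2 and 3. */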
10687 /* Builtins with folding operations that operate on "..." arguments
10688 need special handling; we need to store the arguments in a convenient
10689 data structure before attempting any folding. Fortunately there are
10690 only a few builtins that fall into this category. FNDECL is the
10691 function, and ARGS holds the NARGS arguments of the call. */
10693 static tree
10694 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10696 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10697 tree ret = NULL_TREE;
10699 switch (fcode)
10701 case BUILT_IN_FPCLASSIFY:
10702 ret = fold_builtin_fpclassify (loc, args, nargs);
10703 break;
10705 default:
10706 break;
10708 if (ret)
10710 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10711 SET_EXPR_LOCATION (ret, loc);
10712 TREE_NO_WARNING (ret) = 1;
10713 return ret;
10715 return NULL_TREE;
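/* A minimal sketch (illustrative, not part of GCC): __builtin_fpclassify
   is the main "..." builtin folded here.  Its first five arguments give
   the values to return for each class (in the order NAN, INFINITE,
   NORMAL, SUBNORMAL, ZERO), so a constant argument folds the whole call
   to one of them:

     #include <math.h>
     int k = __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                   FP_SUBNORMAL, FP_ZERO, 1.0);
     // folds to FP_NORMAL
*/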
10718 /* Initialize format string characters in the target charset. */
10720 bool
10721 init_target_chars (void)
10723 static bool init;
10724 if (!init)
10726 target_newline = lang_hooks.to_target_charset ('\n');
10727 target_percent = lang_hooks.to_target_charset ('%');
10728 target_c = lang_hooks.to_target_charset ('c');
10729 target_s = lang_hooks.to_target_charset ('s');
10730 if (target_newline == 0 || target_percent == 0 || target_c == 0
10731 || target_s == 0)
10732 return false;
10734 target_percent_c[0] = target_percent;
10735 target_percent_c[1] = target_c;
10736 target_percent_c[2] = '\0';
10738 target_percent_s[0] = target_percent;
10739 target_percent_s[1] = target_s;
10740 target_percent_s[2] = '\0';
10742 target_percent_s_newline[0] = target_percent;
10743 target_percent_s_newline[1] = target_s;
10744 target_percent_s_newline[2] = target_newline;
10745 target_percent_s_newline[3] = '\0';
10747 init = true;
10749 return true;
10752 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10753 and that no overflow/underflow occurred. INEXACT is true if M was
10754 not exactly calculated. TYPE is the tree type for the result.
10755 This function assumes that the caller cleared the MPFR flags
10756 before calculating M, so that any flag raised by the calculation
10757 can be inspected here. Return NULL_TREE if any check fails. */
10759 static tree
10760 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10762 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10763 overflow/underflow occurred. If -frounding-math, proceed iff
10764 the computation of M was exact. */
10765 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10766 && (!flag_rounding_math || !inexact))
10768 REAL_VALUE_TYPE rr;
10770 real_from_mpfr (&rr, m, type, GMP_RNDN);
10771 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
10772 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10773 but the mpfr_t is not, then we underflowed in the
10774 conversion. */
10775 if (real_isfinite (&rr)
10776 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10778 REAL_VALUE_TYPE rmode;
10780 real_convert (&rmode, TYPE_MODE (type), &rr);
10781 /* Proceed iff the specified mode can hold the value. */
10782 if (real_identical (&rmode, &rr))
10783 return build_real (type, rmode);
10786 return NULL_TREE;
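/* A minimal sketch of the caller protocol assumed above, mirroring the
   real callers later in this file:

     mpfr_clear_flags ();                     // reset before computing
     inexact = mpfr_lgamma (m, &sg, m, rnd);  // any MPFR computation
     result_lg = do_mpfr_ckconv (m, type, inexact);
*/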
10789 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10790 number and that no overflow/underflow occurred. INEXACT is true
10791 if M was not exactly calculated. TYPE is the tree type for the
10792 result. This function assumes that the caller cleared the MPFR
10793 flags before calculating M, so that any flag raised by the
10794 calculation can be inspected here. Return NULL_TREE if any check
10795 fails; if FORCE_CONVERT is true, bypass the checks. */
10797 static tree
10798 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10800 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10801 overflow/underflow occurred. If -frounding-math, proceed iff
10802 the computation of M was exact. */
10803 if (force_convert
10804 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10805 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10806 && (!flag_rounding_math || !inexact)))
10808 REAL_VALUE_TYPE re, im;
10810 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10811 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10812 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10813 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10814 but the mpfr_t is not, then we underflowed in the
10815 conversion. */
10816 if (force_convert
10817 || (real_isfinite (&re) && real_isfinite (&im)
10818 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10819 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10821 REAL_VALUE_TYPE re_mode, im_mode;
10823 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10824 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10825 /* Proceed iff the specified mode can hold the value. */
10826 if (force_convert
10827 || (real_identical (&re_mode, &re)
10828 && real_identical (&im_mode, &im)))
10829 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10830 build_real (TREE_TYPE (type), im_mode));
10833 return NULL_TREE;
10836 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to
10837 store the quotient bits through ARG_QUO and return the remainder.
10838 The type is taken from the type of ARG0 and is used to set the
10839 precision of the calculation and of the results. */
10841 static tree
10842 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10844 tree const type = TREE_TYPE (arg0);
10845 tree result = NULL_TREE;
10847 STRIP_NOPS (arg0);
10848 STRIP_NOPS (arg1);
10850 /* To proceed, MPFR must exactly represent the target floating point
10851 format, which only happens when the target base equals two. */
10852 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10853 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10854 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10856 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10857 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10859 if (real_isfinite (ra0) && real_isfinite (ra1))
10861 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10862 const int prec = fmt->p;
10863 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10864 tree result_rem;
10865 long integer_quo;
10866 mpfr_t m0, m1;
10868 mpfr_inits2 (prec, m0, m1, NULL);
10869 mpfr_from_real (m0, ra0, GMP_RNDN);
10870 mpfr_from_real (m1, ra1, GMP_RNDN);
10871 mpfr_clear_flags ();
10872 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10873 /* Remquo is independent of the rounding mode, so pass
10874 inexact=0 to do_mpfr_ckconv(). */
10875 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10876 mpfr_clears (m0, m1, NULL);
10877 if (result_rem)
10879 /* MPFR calculates quo in the host's long, so it may
10880 return more bits in quo than the target int can hold
10881 if sizeof(host long) > sizeof(target int). This can
10882 happen even for native compilers in LP64 mode. In
10883 these cases, reduce quo modulo the largest value the
10884 target int can hold while leaving one bit for the
10885 sign. */
10886 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10887 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10889 /* Dereference the quo pointer argument. */
10890 arg_quo = build_fold_indirect_ref (arg_quo);
10891 /* Proceed iff a valid pointer type was passed in. */
10892 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10894 /* Set the value. */
10895 tree result_quo
10896 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10897 build_int_cst (TREE_TYPE (arg_quo),
10898 integer_quo));
10899 TREE_SIDE_EFFECTS (result_quo) = 1;
10900 /* Combine the quo assignment with the rem. */
10901 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10902 result_quo, result_rem));
10907 return result;
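/* A minimal sketch (illustrative, not part of GCC) of a call the
   folding above handles; remquo returns the remainder and stores the
   low quotient bits:

     int q;
     double r = __builtin_remquo (7.0, 2.0, &q);  // r == -1.0, q == 4
*/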
10910 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10911 resulting value as a tree with type TYPE. The mpfr precision is
10912 set to the precision of TYPE. We assume that this mpfr function
10913 returns zero if the result could be calculated exactly within the
10914 requested precision. In addition, the integer pointer represented
10915 by ARG_SG will be dereferenced and set to the appropriate signgam
10916 (-1,1) value. */
10918 static tree
10919 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10921 tree result = NULL_TREE;
10923 STRIP_NOPS (arg);
10925 /* To proceed, MPFR must exactly represent the target floating point
10926 format, which only happens when the target base equals two. Also
10927 verify ARG is a constant and that ARG_SG is an int pointer. */
10928 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10929 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10930 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10931 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10933 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10935 /* In addition to NaN and Inf, the argument cannot be zero or a
10936 negative integer. */
10937 if (real_isfinite (ra)
10938 && ra->cl != rvc_zero
10939 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10941 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10942 const int prec = fmt->p;
10943 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10944 int inexact, sg;
10945 mpfr_t m;
10946 tree result_lg;
10948 mpfr_init2 (m, prec);
10949 mpfr_from_real (m, ra, GMP_RNDN);
10950 mpfr_clear_flags ();
10951 inexact = mpfr_lgamma (m, &sg, m, rnd);
10952 result_lg = do_mpfr_ckconv (m, type, inexact);
10953 mpfr_clear (m);
10954 if (result_lg)
10956 tree result_sg;
10958 /* Dereference the arg_sg pointer argument. */
10959 arg_sg = build_fold_indirect_ref (arg_sg);
10960 /* Assign the signgam value into *arg_sg. */
10961 result_sg = fold_build2 (MODIFY_EXPR,
10962 TREE_TYPE (arg_sg), arg_sg,
10963 build_int_cst (TREE_TYPE (arg_sg), sg));
10964 TREE_SIDE_EFFECTS (result_sg) = 1;
10965 /* Combine the signgam assignment with the lgamma result. */
10966 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10967 result_sg, result_lg));
10972 return result;
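/* A minimal sketch (illustrative, not part of GCC) of a call folded by
   the function above; Gamma(-2.5) is negative, so -1 is stored through
   the signgam pointer:

     int sg;
     double v = __builtin_lgamma_r (-2.5, &sg);
     // v folds to log(|Gamma(-2.5)|), roughly -0.0562; sg is set to -1
*/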
10975 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
10976 mpc function FUNC on them and return the resulting value as a tree
10977 with type TYPE. The mpfr precision is set to the precision of
10978 TYPE. We assume that function FUNC returns zero if the result
10979 could be calculated exactly within the requested precision. If
10980 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10981 in the arguments and/or results. */
10983 tree
10984 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10985 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10987 tree result = NULL_TREE;
10989 STRIP_NOPS (arg0);
10990 STRIP_NOPS (arg1);
10992 /* To proceed, MPFR must exactly represent the target floating point
10993 format, which only happens when the target base equals two. */
10994 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10995 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10996 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10997 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10998 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11000 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11001 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11002 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11003 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11005 if (do_nonfinite
11006 || (real_isfinite (re0) && real_isfinite (im0)
11007 && real_isfinite (re1) && real_isfinite (im1)))
11009 const struct real_format *const fmt =
11010 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11011 const int prec = fmt->p;
11012 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11013 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11014 int inexact;
11015 mpc_t m0, m1;
11017 mpc_init2 (m0, prec);
11018 mpc_init2 (m1, prec);
11019 mpfr_from_real (mpc_realref (m0), re0, rnd);
11020 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11021 mpfr_from_real (mpc_realref (m1), re1, rnd);
11022 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11023 mpfr_clear_flags ();
11024 inexact = func (m0, m0, m1, crnd);
11025 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11026 mpc_clear (m0);
11027 mpc_clear (m1);
11031 return result;
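/* A minimal sketch (illustrative, not part of GCC); do_mpc_arg2 backs
   constant folding of two-argument complex builtins such as
   __builtin_cpow (when GCC is built with MPC support):

     _Complex double z = __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i);
     // (1+i)**2 == 2i, so z folds to a COMPLEX_CST close to 0+2i
*/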
11034 /* A wrapper function for builtin folding that prevents warnings for
11035 "statement without effect" and the like, caused by removing the
11036 call node before the warning is generated. */
11038 tree
11039 fold_call_stmt (gcall *stmt, bool ignore)
11041 tree ret = NULL_TREE;
11042 tree fndecl = gimple_call_fndecl (stmt);
11043 location_t loc = gimple_location (stmt);
11044 if (fndecl && fndecl_built_in_p (fndecl)
11045 && !gimple_call_va_arg_pack_p (stmt))
11047 int nargs = gimple_call_num_args (stmt);
11048 tree *args = (nargs > 0
11049 ? gimple_call_arg_ptr (stmt, 0)
11050 : &error_mark_node);
11052 if (avoid_folding_inline_builtin (fndecl))
11053 return NULL_TREE;
11054 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11056 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11058 else
11060 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11061 if (ret)
11063 /* Propagate location information from the original call to
11064 the expansion of the builtin. Otherwise things like
11065 maybe_emit_chk_warning, which operate on the expansion
11066 of a builtin, will use the wrong location information. */
11067 if (gimple_has_location (stmt))
11069 tree realret = ret;
11070 if (TREE_CODE (ret) == NOP_EXPR)
11071 realret = TREE_OPERAND (ret, 0);
11072 if (CAN_HAVE_LOCATION_P (realret)
11073 && !EXPR_HAS_LOCATION (realret))
11074 SET_EXPR_LOCATION (realret, loc);
11075 return realret;
11077 return ret;
11081 return NULL_TREE;
11084 /* Look up the builtin function corresponding to DECL via
11085 builtin_decl_explicit and set ASMSPEC as its user assembler name.
11086 DECL must be a function decl that declares a builtin. */
11088 void
11089 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11091 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11092 && asmspec != 0);
11094 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11095 set_user_assembler_name (builtin, asmspec);
11097 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11098 && INT_TYPE_SIZE < BITS_PER_WORD)
11100 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11101 set_user_assembler_libfunc ("ffs", asmspec);
11102 set_optab_libfunc (ffs_optab, mode, "ffs");
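/* A minimal sketch (illustrative, not part of GCC) of a declaration
   that reaches the function above: an asm label on a builtin's
   declaration renames the symbol used when the builtin is expanded as
   a library call:

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");
*/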
11106 /* Return true if DECL is a builtin that expands to a constant or similarly
11107 simple code. */
11108 bool
11109 is_simple_builtin (tree decl)
11111 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11112 switch (DECL_FUNCTION_CODE (decl))
11114 /* Builtins that expand to constants. */
11115 case BUILT_IN_CONSTANT_P:
11116 case BUILT_IN_EXPECT:
11117 case BUILT_IN_OBJECT_SIZE:
11118 case BUILT_IN_UNREACHABLE:
11119 /* Simple register moves or loads from stack. */
11120 case BUILT_IN_ASSUME_ALIGNED:
11121 case BUILT_IN_RETURN_ADDRESS:
11122 case BUILT_IN_EXTRACT_RETURN_ADDR:
11123 case BUILT_IN_FROB_RETURN_ADDR:
11124 case BUILT_IN_RETURN:
11125 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11126 case BUILT_IN_FRAME_ADDRESS:
11127 case BUILT_IN_VA_END:
11128 case BUILT_IN_STACK_SAVE:
11129 case BUILT_IN_STACK_RESTORE:
11130 /* Exception state returns or moves registers around. */
11131 case BUILT_IN_EH_FILTER:
11132 case BUILT_IN_EH_POINTER:
11133 case BUILT_IN_EH_COPY_VALUES:
11134 return true;
11136 default:
11137 return false;
11140 return false;
11143 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11144 is most probably expanded inline into reasonably simple code. This is a
11145 superset of is_simple_builtin. */
11146 bool
11147 is_inexpensive_builtin (tree decl)
11149 if (!decl)
11150 return false;
11151 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11152 return true;
11153 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11154 switch (DECL_FUNCTION_CODE (decl))
11156 case BUILT_IN_ABS:
11157 CASE_BUILT_IN_ALLOCA:
11158 case BUILT_IN_BSWAP16:
11159 case BUILT_IN_BSWAP32:
11160 case BUILT_IN_BSWAP64:
11161 case BUILT_IN_CLZ:
11162 case BUILT_IN_CLZIMAX:
11163 case BUILT_IN_CLZL:
11164 case BUILT_IN_CLZLL:
11165 case BUILT_IN_CTZ:
11166 case BUILT_IN_CTZIMAX:
11167 case BUILT_IN_CTZL:
11168 case BUILT_IN_CTZLL:
11169 case BUILT_IN_FFS:
11170 case BUILT_IN_FFSIMAX:
11171 case BUILT_IN_FFSL:
11172 case BUILT_IN_FFSLL:
11173 case BUILT_IN_IMAXABS:
11174 case BUILT_IN_FINITE:
11175 case BUILT_IN_FINITEF:
11176 case BUILT_IN_FINITEL:
11177 case BUILT_IN_FINITED32:
11178 case BUILT_IN_FINITED64:
11179 case BUILT_IN_FINITED128:
11180 case BUILT_IN_FPCLASSIFY:
11181 case BUILT_IN_ISFINITE:
11182 case BUILT_IN_ISINF_SIGN:
11183 case BUILT_IN_ISINF:
11184 case BUILT_IN_ISINFF:
11185 case BUILT_IN_ISINFL:
11186 case BUILT_IN_ISINFD32:
11187 case BUILT_IN_ISINFD64:
11188 case BUILT_IN_ISINFD128:
11189 case BUILT_IN_ISNAN:
11190 case BUILT_IN_ISNANF:
11191 case BUILT_IN_ISNANL:
11192 case BUILT_IN_ISNAND32:
11193 case BUILT_IN_ISNAND64:
11194 case BUILT_IN_ISNAND128:
11195 case BUILT_IN_ISNORMAL:
11196 case BUILT_IN_ISGREATER:
11197 case BUILT_IN_ISGREATEREQUAL:
11198 case BUILT_IN_ISLESS:
11199 case BUILT_IN_ISLESSEQUAL:
11200 case BUILT_IN_ISLESSGREATER:
11201 case BUILT_IN_ISUNORDERED:
11202 case BUILT_IN_VA_ARG_PACK:
11203 case BUILT_IN_VA_ARG_PACK_LEN:
11204 case BUILT_IN_VA_COPY:
11205 case BUILT_IN_TRAP:
11206 case BUILT_IN_SAVEREGS:
11207 case BUILT_IN_POPCOUNTL:
11208 case BUILT_IN_POPCOUNTLL:
11209 case BUILT_IN_POPCOUNTIMAX:
11210 case BUILT_IN_POPCOUNT:
11211 case BUILT_IN_PARITYL:
11212 case BUILT_IN_PARITYLL:
11213 case BUILT_IN_PARITYIMAX:
11214 case BUILT_IN_PARITY:
11215 case BUILT_IN_LABS:
11216 case BUILT_IN_LLABS:
11217 case BUILT_IN_PREFETCH:
11218 case BUILT_IN_ACC_ON_DEVICE:
11219 return true;
11221 default:
11222 return is_simple_builtin (decl);
11225 return false;
11228 /* Return true if T is a constant and the value cast to a target char
11229 can be represented by a host char.
11230 If so, store the host char value in *P. */
11232 bool
11233 target_char_cst_p (tree t, char *p)
11235 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11236 return false;
11238 *p = (char)tree_to_uhwi (t);
11239 return true;
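/* A minimal sketch of the typical use made of target_char_cst_p when
   folding string builtins such as __builtin_strchr (cf. gimple-fold.c):

     char c;
     if (target_char_cst_p (arg1, &c))
       {
         // arg1 is usable as a host char; search the constant string
         const char *r = strchr (str, c);
         ...
       }
*/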