/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
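
/* Illustrative sketch (not part of the original sources): how the
   (align, bitpos) pair collapses into a single usable alignment.  The
   helper name and the sample numbers below are assumptions for the
   example only.  */
#if 0
static unsigned int
example_byte_alignment (tree exp)
{
  unsigned int align;
  unsigned HOST_WIDE_INT bitpos;

  /* align/bitpos describe the known low bits of the address:
     ptr & (align - 1) == bitpos.  E.g. an object at byte offset 4
     past a 16-byte-aligned base yields align == 128 and bitpos == 32
     (both in bits).  */
  get_object_alignment_1 (exp, &align, &bitpos);
  if (bitpos != 0)
    align = least_bit_hwi (bitpos);	/* 32 bits, i.e. 4 bytes.  */
  return align / BITS_PER_UNIT;
}
#endif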
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
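
/* Illustrative sketch (not part of the original sources): string_length
   treats the buffer as MAXELTS elements of ELTSIZE bytes each, so for a
   2-byte "wide" string the scan stops at the first all-zero element.
   The byte layout below assumes a little-endian target and exists only
   for the example.  */
#if 0
static void
example_string_length (void)
{
  /* L"hi" as raw bytes on a little-endian target: 'h',0  'i',0  0,0.  */
  const char buf[] = { 'h', 0, 'i', 0, 0, 0 };
  unsigned len = string_length (buf, 2, 3);	/* len == 2.  */
  (void) len;
}
#endif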
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	TREE_NO_WARNING (src) = 1;
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
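
/* Illustrative sketch (not part of the original sources): the folded
   results one would expect from c_strlen for constant arguments.  The
   helper name is an assumption for the example.  */
#if 0
static void
example_c_strlen (tree src /* e.g. an ADDR_EXPR of "foo" */)
{
  /* For a plain constant "foo" this folds to ssize_int (3); for a
     string like "foo\0bar" accessed at a variable offset it returns
     NULL_TREE instead, as described in the function comment above.  */
  tree len = c_strlen (src, 1);
  (void) len;
}
#endif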
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
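
/* Illustrative sketch (not part of the original sources): on a
   little-endian target, reading "ab" into a 32-bit integer mode packs
   byte 0 into the low bits, giving the constant 0x00006261.  The
   helper name is an assumption for the example.  */
#if 0
static rtx
example_c_readstr (void)
{
  /* 'a' == 0x61 at bits 0-7, 'b' == 0x62 at bits 8-15, then zeros
     because the string terminates.  */
  return c_readstr ("ab", SImode);
}
#endif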
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
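
/* Illustrative sketch (not part of the original sources): a typical
   call like the ones made by the expanders below, checking for exactly
   two pointer arguments.  A trailing 0 specifier would instead accept
   further arguments of any type.  The helper name is an assumption for
   the example.  */
#if 0
static bool
example_validate (tree exp)
{
  /* True only for a call of the shape foo (void *p, void *q).  */
  return validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);
}
#endif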
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
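
/* Illustrative sketch (not part of the original sources): the
   source-level calls this expander handles.  The rw and locality
   arguments must be compile-time constants, as enforced above.  */
#if 0
void
example_prefetch_usage (const double *a, double *b, int n)
{
  for (int i = 0; i < n; i++)
    {
      __builtin_prefetch (&a[i + 8], 0, 3);	/* read, high locality.  */
      __builtin_prefetch (&b[i + 8], 1, 0);	/* write, low locality.  */
      b[i] = a[i] * 2.0;
    }
}
#endif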
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1666 /* __builtin_apply_args returns block of memory allocated on
1667 the stack into which is stored the arg pointer, structure
1668 value address, static chain, and all the registers that might
1669 possibly be used in performing a function call. The code is
1670 moved to the start of the function so the incoming values are
1671 saved. */
1673 static rtx
1674 expand_builtin_apply_args (void)
1676 /* Don't do __builtin_apply_args more than once in a function.
1677 Save the result of the first call and reuse it. */
1678 if (apply_args_value != 0)
1679 return apply_args_value;
1681 /* When this function is called, it means that registers must be
1682 saved on entry to this function. So we migrate the
1683 call to the first insn of this function. */
1684 rtx temp;
1686 start_sequence ();
1687 temp = expand_builtin_apply_args_1 ();
1688 rtx_insn *seq = get_insns ();
1689 end_sequence ();
1691 apply_args_value = temp;
1693 /* Put the insns after the NOTE that starts the function.
1694 If this is inside a start_sequence, make the outer-level insn
1695 chain current, so the code is placed at the start of the
1696 function. If internal_arg_pointer is a non-virtual pseudo,
1697 it needs to be placed after the insn that initializes
1698 that pseudo. */
1699 push_topmost_sequence ();
1700 if (REG_P (crtl->args.internal_arg_pointer)
1701 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1702 emit_insn_before (seq, parm_birth_insn);
1703 else
1704 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1705 pop_topmost_sequence ();
1706 return temp;
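/* An illustrative use of the builtin trio handled here and below; the
   names target_fn/forwarder and the 64-byte argument-block bound are
   assumptions made up for this sketch:

     int target_fn (int, double);

     int
     forwarder (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply and expand_builtin_return below handle the
   second and third builtins.  */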
1710 /* Perform an untyped call and save the state required to perform an
1711 untyped return of whatever value was returned by the given function. */
1713 static rtx
1714 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1716 int size, align, regno;
1717 fixed_size_mode mode;
1718 rtx incoming_args, result, reg, dest, src;
1719 rtx_call_insn *call_insn;
1720 rtx old_stack_level = 0;
1721 rtx call_fusage = 0;
1722 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1724 arguments = convert_memory_address (Pmode, arguments);
1726 /* Create a block where the return registers can be saved. */
1727 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1729 /* Fetch the arg pointer from the ARGUMENTS block. */
1730 incoming_args = gen_reg_rtx (Pmode);
1731 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1732 if (!STACK_GROWS_DOWNWARD)
1733 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1734 incoming_args, 0, OPTAB_LIB_WIDEN);
1736 /* Push a new argument block and copy the arguments. Do not allow
1737 the (potential) memcpy call below to interfere with our stack
1738 manipulations. */
1739 do_pending_stack_adjust ();
1740 NO_DEFER_POP;
1742 /* Save the stack with nonlocal if available. */
1743 if (targetm.have_save_stack_nonlocal ())
1744 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1745 else
1746 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1748 /* Allocate a block of memory onto the stack and copy the memory
1749 arguments to the outgoing arguments address. We can pass TRUE
1750 as the 4th argument because we just saved the stack pointer
1751 and will restore it right after the call. */
1752 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1754 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1755 may have already set current_function_calls_alloca to true.
1756 current_function_calls_alloca won't be set if argsize is zero,
1757 so we have to guarantee need_drap is true here. */
1758 if (SUPPORTS_STACK_ALIGNMENT)
1759 crtl->need_drap = true;
1761 dest = virtual_outgoing_args_rtx;
1762 if (!STACK_GROWS_DOWNWARD)
1764 if (CONST_INT_P (argsize))
1765 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1766 else
1767 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1769 dest = gen_rtx_MEM (BLKmode, dest);
1770 set_mem_align (dest, PARM_BOUNDARY);
1771 src = gen_rtx_MEM (BLKmode, incoming_args);
1772 set_mem_align (src, PARM_BOUNDARY);
1773 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1775 /* Refer to the argument block. */
1776 apply_args_size ();
1777 arguments = gen_rtx_MEM (BLKmode, arguments);
1778 set_mem_align (arguments, PARM_BOUNDARY);
1780 /* Walk past the arg-pointer and structure value address. */
1781 size = GET_MODE_SIZE (Pmode);
1782 if (struct_value)
1783 size += GET_MODE_SIZE (Pmode);
1785 /* Restore each of the registers previously saved. Make USE insns
1786 for each of these registers for use in making the call. */
1787 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1788 if ((mode = apply_args_mode[regno]) != VOIDmode)
1790 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1791 if (size % align != 0)
1792 size = CEIL (size, align) * align;
1793 reg = gen_rtx_REG (mode, regno);
1794 emit_move_insn (reg, adjust_address (arguments, mode, size));
1795 use_reg (&call_fusage, reg);
1796 size += GET_MODE_SIZE (mode);
1799 /* Restore the structure value address unless this is passed as an
1800 "invisible" first argument. */
1801 size = GET_MODE_SIZE (Pmode);
1802 if (struct_value)
1804 rtx value = gen_reg_rtx (Pmode);
1805 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1806 emit_move_insn (struct_value, value);
1807 if (REG_P (struct_value))
1808 use_reg (&call_fusage, struct_value);
1809 size += GET_MODE_SIZE (Pmode);
1812 /* All arguments and registers used for the call are set up by now! */
1813 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1815 /* Ensure the address is valid. A SYMBOL_REF is already valid, so
1816 there is no need, and we don't want to load it into a register as an
1817 optimization, because prepare_call_address already did that if needed. */
1818 if (GET_CODE (function) != SYMBOL_REF)
1819 function = memory_address (FUNCTION_MODE, function);
1821 /* Generate the actual call instruction and save the return value. */
1822 if (targetm.have_untyped_call ())
1824 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1825 emit_call_insn (targetm.gen_untyped_call (mem, result,
1826 result_vector (1, result)));
1828 else if (targetm.have_call_value ())
1830 rtx valreg = 0;
1832 /* Locate the unique return register. It is not possible to
1833 express a call that sets more than one return register using
1834 call_value; use untyped_call for that. In fact, untyped_call
1835 only needs to save the return registers in the given block. */
1836 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 if ((mode = apply_result_mode[regno]) != VOIDmode)
1839 gcc_assert (!valreg); /* have_untyped_call required. */
1841 valreg = gen_rtx_REG (mode, regno);
1844 emit_insn (targetm.gen_call_value (valreg,
1845 gen_rtx_MEM (FUNCTION_MODE, function),
1846 const0_rtx, NULL_RTX, const0_rtx));
1848 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1850 else
1851 gcc_unreachable ();
1853 /* Find the CALL insn we just emitted, and attach the register usage
1854 information. */
1855 call_insn = last_call_insn ();
1856 add_function_usage_to (call_insn, call_fusage);
1858 /* Restore the stack. */
1859 if (targetm.have_save_stack_nonlocal ())
1860 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1861 else
1862 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1863 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1865 OK_DEFER_POP;
1867 /* Return the address of the result block. */
1868 result = copy_addr_to_reg (XEXP (result, 0));
1869 return convert_memory_address (ptr_mode, result);
1872 /* Perform an untyped return. */
1874 static void
1875 expand_builtin_return (rtx result)
1877 int size, align, regno;
1878 fixed_size_mode mode;
1879 rtx reg;
1880 rtx_insn *call_fusage = 0;
1882 result = convert_memory_address (Pmode, result);
1884 apply_result_size ();
1885 result = gen_rtx_MEM (BLKmode, result);
1887 if (targetm.have_untyped_return ())
1889 rtx vector = result_vector (0, result);
1890 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1891 emit_barrier ();
1892 return;
1895 /* Restore the return value and note that each value is used. */
1896 size = 0;
1897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1898 if ((mode = apply_result_mode[regno]) != VOIDmode)
1900 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1901 if (size % align != 0)
1902 size = CEIL (size, align) * align;
1903 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1904 emit_move_insn (reg, adjust_address (result, mode, size));
1906 push_to_sequence (call_fusage);
1907 emit_use (reg);
1908 call_fusage = get_insns ();
1909 end_sequence ();
1910 size += GET_MODE_SIZE (mode);
1913 /* Put the USE insns before the return. */
1914 emit_insn (call_fusage);
1916 /* Return whatever values were restored by jumping directly to the end
1917 of the function. */
1918 expand_naked_return ();
1921 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1923 static enum type_class
1924 type_to_class (tree type)
1926 switch (TREE_CODE (type))
1928 case VOID_TYPE: return void_type_class;
1929 case INTEGER_TYPE: return integer_type_class;
1930 case ENUMERAL_TYPE: return enumeral_type_class;
1931 case BOOLEAN_TYPE: return boolean_type_class;
1932 case POINTER_TYPE: return pointer_type_class;
1933 case REFERENCE_TYPE: return reference_type_class;
1934 case OFFSET_TYPE: return offset_type_class;
1935 case REAL_TYPE: return real_type_class;
1936 case COMPLEX_TYPE: return complex_type_class;
1937 case FUNCTION_TYPE: return function_type_class;
1938 case METHOD_TYPE: return method_type_class;
1939 case RECORD_TYPE: return record_type_class;
1940 case UNION_TYPE:
1941 case QUAL_UNION_TYPE: return union_type_class;
1942 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1943 ? string_type_class : array_type_class);
1944 case LANG_TYPE: return lang_type_class;
1945 default: return no_type_class;
1949 /* Expand a call EXP to __builtin_classify_type. */
1951 static rtx
1952 expand_builtin_classify_type (tree exp)
1954 if (call_expr_nargs (exp))
1955 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1956 return GEN_INT (no_type_class);
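/* Hedged examples of what the expansion above yields at the source
   level (the classes follow the type_class enum in typeclass.h):

     __builtin_classify_type (42)   evaluates to integer_type_class
     __builtin_classify_type (4.2)  evaluates to real_type_class
     __builtin_classify_type ("42") evaluates to pointer_type_class
                                    (the array argument decays)  */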
1959 /* This helper macro, meant to be used in mathfn_built_in below, determines
1960 which among a set of builtin math functions is appropriate for a given type
1961 mode. The `F' (float) and `L' (long double) variants are automatically
1962 generated from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1963 types, there are additional types that are considered with 'F32', 'F64',
1964 'F128', etc. suffixes. */
1965 #define CASE_MATHFN(MATHFN) \
1966 CASE_CFN_##MATHFN: \
1967 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1968 fcodel = BUILT_IN_##MATHFN##L ; break;
1969 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1970 types. */
1971 #define CASE_MATHFN_FLOATN(MATHFN) \
1972 CASE_CFN_##MATHFN: \
1973 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1974 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1975 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1976 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1977 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1978 break;
1979 /* Similar to above, but appends _R after any F/L suffix. */
1980 #define CASE_MATHFN_REENT(MATHFN) \
1981 case CFN_BUILT_IN_##MATHFN##_R: \
1982 case CFN_BUILT_IN_##MATHFN##F_R: \
1983 case CFN_BUILT_IN_##MATHFN##L_R: \
1984 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1985 fcodel = BUILT_IN_##MATHFN##L_R ; break;
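/* As an example of the first macro, CASE_MATHFN (SIN) expands (through
   the generated CASE_CFN_SIN) to roughly

     case CFN_BUILT_IN_SIN: case CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   in the switch in mathfn_built_in_2 below.  */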
1987 /* Return a function equivalent to FN but operating on floating-point
1988 values of type TYPE, or END_BUILTINS if no such function exists.
1989 This is purely an operation on function codes; it does not guarantee
1990 that the target actually has an implementation of the function. */
1992 static built_in_function
1993 mathfn_built_in_2 (tree type, combined_fn fn)
1995 tree mtype;
1996 built_in_function fcode, fcodef, fcodel;
1997 built_in_function fcodef16 = END_BUILTINS;
1998 built_in_function fcodef32 = END_BUILTINS;
1999 built_in_function fcodef64 = END_BUILTINS;
2000 built_in_function fcodef128 = END_BUILTINS;
2001 built_in_function fcodef32x = END_BUILTINS;
2002 built_in_function fcodef64x = END_BUILTINS;
2003 built_in_function fcodef128x = END_BUILTINS;
2005 switch (fn)
2007 CASE_MATHFN (ACOS)
2008 CASE_MATHFN (ACOSH)
2009 CASE_MATHFN (ASIN)
2010 CASE_MATHFN (ASINH)
2011 CASE_MATHFN (ATAN)
2012 CASE_MATHFN (ATAN2)
2013 CASE_MATHFN (ATANH)
2014 CASE_MATHFN (CBRT)
2015 CASE_MATHFN_FLOATN (CEIL)
2016 CASE_MATHFN (CEXPI)
2017 CASE_MATHFN_FLOATN (COPYSIGN)
2018 CASE_MATHFN (COS)
2019 CASE_MATHFN (COSH)
2020 CASE_MATHFN (DREM)
2021 CASE_MATHFN (ERF)
2022 CASE_MATHFN (ERFC)
2023 CASE_MATHFN (EXP)
2024 CASE_MATHFN (EXP10)
2025 CASE_MATHFN (EXP2)
2026 CASE_MATHFN (EXPM1)
2027 CASE_MATHFN (FABS)
2028 CASE_MATHFN (FDIM)
2029 CASE_MATHFN_FLOATN (FLOOR)
2030 CASE_MATHFN_FLOATN (FMA)
2031 CASE_MATHFN_FLOATN (FMAX)
2032 CASE_MATHFN_FLOATN (FMIN)
2033 CASE_MATHFN (FMOD)
2034 CASE_MATHFN (FREXP)
2035 CASE_MATHFN (GAMMA)
2036 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2037 CASE_MATHFN (HUGE_VAL)
2038 CASE_MATHFN (HYPOT)
2039 CASE_MATHFN (ILOGB)
2040 CASE_MATHFN (ICEIL)
2041 CASE_MATHFN (IFLOOR)
2042 CASE_MATHFN (INF)
2043 CASE_MATHFN (IRINT)
2044 CASE_MATHFN (IROUND)
2045 CASE_MATHFN (ISINF)
2046 CASE_MATHFN (J0)
2047 CASE_MATHFN (J1)
2048 CASE_MATHFN (JN)
2049 CASE_MATHFN (LCEIL)
2050 CASE_MATHFN (LDEXP)
2051 CASE_MATHFN (LFLOOR)
2052 CASE_MATHFN (LGAMMA)
2053 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2054 CASE_MATHFN (LLCEIL)
2055 CASE_MATHFN (LLFLOOR)
2056 CASE_MATHFN (LLRINT)
2057 CASE_MATHFN (LLROUND)
2058 CASE_MATHFN (LOG)
2059 CASE_MATHFN (LOG10)
2060 CASE_MATHFN (LOG1P)
2061 CASE_MATHFN (LOG2)
2062 CASE_MATHFN (LOGB)
2063 CASE_MATHFN (LRINT)
2064 CASE_MATHFN (LROUND)
2065 CASE_MATHFN (MODF)
2066 CASE_MATHFN (NAN)
2067 CASE_MATHFN (NANS)
2068 CASE_MATHFN_FLOATN (NEARBYINT)
2069 CASE_MATHFN (NEXTAFTER)
2070 CASE_MATHFN (NEXTTOWARD)
2071 CASE_MATHFN (POW)
2072 CASE_MATHFN (POWI)
2073 CASE_MATHFN (POW10)
2074 CASE_MATHFN (REMAINDER)
2075 CASE_MATHFN (REMQUO)
2076 CASE_MATHFN_FLOATN (RINT)
2077 CASE_MATHFN_FLOATN (ROUND)
2078 CASE_MATHFN (SCALB)
2079 CASE_MATHFN (SCALBLN)
2080 CASE_MATHFN (SCALBN)
2081 CASE_MATHFN (SIGNBIT)
2082 CASE_MATHFN (SIGNIFICAND)
2083 CASE_MATHFN (SIN)
2084 CASE_MATHFN (SINCOS)
2085 CASE_MATHFN (SINH)
2086 CASE_MATHFN_FLOATN (SQRT)
2087 CASE_MATHFN (TAN)
2088 CASE_MATHFN (TANH)
2089 CASE_MATHFN (TGAMMA)
2090 CASE_MATHFN_FLOATN (TRUNC)
2091 CASE_MATHFN (Y0)
2092 CASE_MATHFN (Y1)
2093 CASE_MATHFN (YN)
2095 default:
2096 return END_BUILTINS;
2099 mtype = TYPE_MAIN_VARIANT (type);
2100 if (mtype == double_type_node)
2101 return fcode;
2102 else if (mtype == float_type_node)
2103 return fcodef;
2104 else if (mtype == long_double_type_node)
2105 return fcodel;
2106 else if (mtype == float16_type_node)
2107 return fcodef16;
2108 else if (mtype == float32_type_node)
2109 return fcodef32;
2110 else if (mtype == float64_type_node)
2111 return fcodef64;
2112 else if (mtype == float128_type_node)
2113 return fcodef128;
2114 else if (mtype == float32x_type_node)
2115 return fcodef32x;
2116 else if (mtype == float64x_type_node)
2117 return fcodef64x;
2118 else if (mtype == float128x_type_node)
2119 return fcodef128x;
2120 else
2121 return END_BUILTINS;
2124 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2125 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2126 otherwise use the explicit declaration. If we can't do the conversion,
2127 return null. */
2129 static tree
2130 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2132 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2133 if (fcode2 == END_BUILTINS)
2134 return NULL_TREE;
2136 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2137 return NULL_TREE;
2139 return builtin_decl_explicit (fcode2);
2142 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2144 tree
2145 mathfn_built_in (tree type, combined_fn fn)
2147 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2150 /* Like mathfn_built_in_1, but take a built_in_function and
2151 always use the implicit builtin declarations. */
2153 tree
2154 mathfn_built_in (tree type, enum built_in_function fn)
2156 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
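/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SQRT) yields the
   decl for sqrtf, while mathfn_built_in (long_double_type_node,
   CFN_SIN) yields the decl for sinl, or NULL_TREE when the variant
   isn't implicitly available.  */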
2159 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2160 return its code, otherwise return IFN_LAST. Note that this function
2161 only tests whether the function is defined in internal-fn.def, not whether
2162 it is actually available on the target. */
2164 internal_fn
2165 associated_internal_fn (tree fndecl)
2167 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2168 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2169 switch (DECL_FUNCTION_CODE (fndecl))
2171 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2172 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2173 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2174 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2175 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2176 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2177 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2178 #include "internal-fn.def"
2180 CASE_FLT_FN (BUILT_IN_POW10):
2181 return IFN_EXP10;
2183 CASE_FLT_FN (BUILT_IN_DREM):
2184 return IFN_REMAINDER;
2186 CASE_FLT_FN (BUILT_IN_SCALBN):
2187 CASE_FLT_FN (BUILT_IN_SCALBLN):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2189 return IFN_LDEXP;
2190 return IFN_LAST;
2192 default:
2193 return IFN_LAST;
2197 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2198 on the current target by a call to an internal function, return the
2199 code of that internal function, otherwise return IFN_LAST. The caller
2200 is responsible for ensuring that any side-effects of the built-in
2201 call are dealt with correctly. E.g. if CALL sets errno, the caller
2202 must decide that the errno result isn't needed or make it available
2203 in some other way. */
2205 internal_fn
2206 replacement_internal_fn (gcall *call)
2208 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2210 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2211 if (ifn != IFN_LAST)
2213 tree_pair types = direct_internal_fn_types (ifn, call);
2214 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2215 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2216 return ifn;
2219 return IFN_LAST;
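/* As an illustration (assuming a target with a sqrt insn and
   -fno-math-errno), a GIMPLE call such as

     _2 = sqrtf (_1);

   can be replaced by the internal-function call

     _2 = .SQRT (_1);

   (dump syntax), with the caller handling the errno caveat described
   above.  */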
2222 /* Expand a call to the builtin ternary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2227 operands. */
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2232 optab builtin_optab;
2233 rtx op0, op1, op2, result;
2234 rtx_insn *insns;
2235 tree fndecl = get_callee_fndecl (exp);
2236 tree arg0, arg1, arg2;
2237 machine_mode mode;
2239 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2240 return NULL_RTX;
2242 arg0 = CALL_EXPR_ARG (exp, 0);
2243 arg1 = CALL_EXPR_ARG (exp, 1);
2244 arg2 = CALL_EXPR_ARG (exp, 2);
2246 switch (DECL_FUNCTION_CODE (fndecl))
2248 CASE_FLT_FN (BUILT_IN_FMA):
2249 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2250 builtin_optab = fma_optab; break;
2251 default:
2252 gcc_unreachable ();
2255 /* Make a suitable register to place result in. */
2256 mode = TYPE_MODE (TREE_TYPE (exp));
2258 /* Before working hard, check whether the instruction is available. */
2259 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260 return NULL_RTX;
2262 result = gen_reg_rtx (mode);
2264 /* Always stabilize the argument list. */
2265 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2269 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270 op1 = expand_normal (arg1);
2271 op2 = expand_normal (arg2);
2273 start_sequence ();
2275 /* Compute into RESULT.
2276 Set RESULT to wherever the result comes back. */
2277 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 result, 0);
2280 /* If we were unable to expand via the builtin, stop the sequence
2281 (without outputting the insns) and call to the library function
2282 with the stabilized argument list. */
2283 if (result == 0)
2285 end_sequence ();
2286 return expand_call (exp, target, target == const0_rtx);
2289 /* Output the entire sequence. */
2290 insns = get_insns ();
2291 end_sequence ();
2292 emit_insn (insns);
2294 return result;
2297 /* Expand a call to the builtin sin and cos math functions.
2298 Return NULL_RTX if a normal call should be emitted rather than expanding the
2299 function in-line. EXP is the expression that is a call to the builtin
2300 function; if convenient, the result should be placed in TARGET.
2301 SUBTARGET may be used as the target for computing one of EXP's
2302 operands. */
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2307 optab builtin_optab;
2308 rtx op0;
2309 rtx_insn *insns;
2310 tree fndecl = get_callee_fndecl (exp);
2311 machine_mode mode;
2312 tree arg;
2314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2317 arg = CALL_EXPR_ARG (exp, 0);
2319 switch (DECL_FUNCTION_CODE (fndecl))
2321 CASE_FLT_FN (BUILT_IN_SIN):
2322 CASE_FLT_FN (BUILT_IN_COS):
2323 builtin_optab = sincos_optab; break;
2324 default:
2325 gcc_unreachable ();
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (exp));
2331 /* Check if the sincos insn is available, otherwise fall back
2332 to the sin or cos insn. */
2333 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334 switch (DECL_FUNCTION_CODE (fndecl))
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 builtin_optab = sin_optab; break;
2338 CASE_FLT_FN (BUILT_IN_COS):
2339 builtin_optab = cos_optab; break;
2340 default:
2341 gcc_unreachable ();
2344 /* Before working hard, check whether the instruction is available. */
2345 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2347 rtx result = gen_reg_rtx (mode);
2349 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 need to expand the argument again. This way, we will not perform
2351 side-effects more than once. */
2352 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2354 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2356 start_sequence ();
2358 /* Compute into RESULT.
2359 Set RESULT to wherever the result comes back. */
2360 if (builtin_optab == sincos_optab)
2362 int ok;
2364 switch (DECL_FUNCTION_CODE (fndecl))
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 break;
2372 default:
2373 gcc_unreachable ();
2375 gcc_assert (ok);
2377 else
2378 result = expand_unop (mode, builtin_optab, op0, result, 0);
2380 if (result != 0)
2382 /* Output the entire sequence. */
2383 insns = get_insns ();
2384 end_sequence ();
2385 emit_insn (insns);
2386 return result;
2389 /* If we were unable to expand via the builtin, stop the sequence
2390 (without outputting the insns) and call to the library function
2391 with the stabilized argument list. */
2392 end_sequence ();
2395 return expand_call (exp, target, target == const0_rtx);
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2399 return an RTL instruction code that implements the functionality.
2400 If that isn't possible or available return CODE_FOR_nothing. */
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2405 bool errno_set = false;
2406 optab builtin_optab = unknown_optab;
2407 machine_mode mode;
2409 switch (DECL_FUNCTION_CODE (fndecl))
2411 CASE_FLT_FN (BUILT_IN_ILOGB):
2412 errno_set = true; builtin_optab = ilogb_optab; break;
2413 CASE_FLT_FN (BUILT_IN_ISINF):
2414 builtin_optab = isinf_optab; break;
2415 case BUILT_IN_ISNORMAL:
2416 case BUILT_IN_ISFINITE:
2417 CASE_FLT_FN (BUILT_IN_FINITE):
2418 case BUILT_IN_FINITED32:
2419 case BUILT_IN_FINITED64:
2420 case BUILT_IN_FINITED128:
2421 case BUILT_IN_ISINFD32:
2422 case BUILT_IN_ISINFD64:
2423 case BUILT_IN_ISINFD128:
2424 /* These builtins have no optabs (yet). */
2425 break;
2426 default:
2427 gcc_unreachable ();
2430 /* There's no easy way to detect the case we need to set EDOM. */
2431 if (flag_errno_math && errno_set)
2432 return CODE_FOR_nothing;
2434 /* Optab mode depends on the mode of the input argument. */
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2437 if (builtin_optab)
2438 return optab_handler (builtin_optab, mode);
2439 return CODE_FOR_nothing;
2442 /* Expand a call to one of the builtin math functions that operate on
2443 a floating-point argument and output an integer result (ilogb, isinf,
2444 isnan, etc.).
2445 Return 0 if a normal call should be emitted rather than expanding the
2446 function in-line. EXP is the expression that is a call to the builtin
2447 function; if convenient, the result should be placed in TARGET. */
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2452 enum insn_code icode = CODE_FOR_nothing;
2453 rtx op0;
2454 tree fndecl = get_callee_fndecl (exp);
2455 machine_mode mode;
2456 tree arg;
2458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459 return NULL_RTX;
2461 arg = CALL_EXPR_ARG (exp, 0);
2462 icode = interclass_mathfn_icode (arg, fndecl);
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2465 if (icode != CODE_FOR_nothing)
2467 struct expand_operand ops[1];
2468 rtx_insn *last = get_last_insn ();
2469 tree orig_arg = arg;
2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side-effects more than once. */
2474 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2476 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2478 if (mode != GET_MODE (op0))
2479 op0 = convert_to_mode (mode, op0, 0);
2481 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482 if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 return ops[0].value;
2486 delete_insns_since (last);
2487 CALL_EXPR_ARG (exp, 0) = orig_arg;
2490 return NULL_RTX;
2493 /* Expand a call to the builtin sincos math function.
2494 Return NULL_RTX if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function. */
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2501 rtx op0, op1, op2, target1, target2;
2502 machine_mode mode;
2503 tree arg, sinp, cosp;
2504 int result;
2505 location_t loc = EXPR_LOCATION (exp);
2506 tree alias_type, alias_off;
2508 if (!validate_arglist (exp, REAL_TYPE,
2509 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510 return NULL_RTX;
2512 arg = CALL_EXPR_ARG (exp, 0);
2513 sinp = CALL_EXPR_ARG (exp, 1);
2514 cosp = CALL_EXPR_ARG (exp, 2);
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (arg));
2519 /* Check if sincos insn is available, otherwise emit the call. */
2520 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521 return NULL_RTX;
2523 target1 = gen_reg_rtx (mode);
2524 target2 = gen_reg_rtx (mode);
2526 op0 = expand_normal (arg);
2527 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528 alias_off = build_int_cst (alias_type, 0);
2529 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 sinp, alias_off));
2531 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 cosp, alias_off));
2534 /* Compute into target1 and target2.
2535 Set TARGET to wherever the result comes back. */
2536 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537 gcc_assert (result);
2539 /* Move target1 and target2 to the memory locations indicated
2540 by op1 and op2. */
2541 emit_move_insn (op1, target1);
2542 emit_move_insn (op2, target2);
2544 return const0_rtx;
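/* E.g. a source-level call sincos (x, &s, &c) is expanded by the code
   above, when the target provides a sincos-style insn, into a single
   instruction computing both values, followed by stores through the
   two pointer arguments.  */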
2547 /* Expand a call to the internal cexpi builtin to the sincos math function.
2548 EXP is the expression that is a call to the builtin function; if convenient,
2549 the result should be placed in TARGET. */
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2554 tree fndecl = get_callee_fndecl (exp);
2555 tree arg, type;
2556 machine_mode mode;
2557 rtx op0, op1, op2;
2558 location_t loc = EXPR_LOCATION (exp);
2560 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561 return NULL_RTX;
2563 arg = CALL_EXPR_ARG (exp, 0);
2564 type = TREE_TYPE (arg);
2565 mode = TYPE_MODE (TREE_TYPE (arg));
2567 /* Try expanding via a sincos optab, falling back to emitting a libcall
2568 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2569 is only generated when either of them is available. */
2570 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2572 op1 = gen_reg_rtx (mode);
2573 op2 = gen_reg_rtx (mode);
2575 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2577 /* Compute into op1 and op2. */
2578 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2580 else if (targetm.libc_has_function (function_sincos))
2582 tree call, fn = NULL_TREE;
2583 tree top1, top2;
2584 rtx op1a, op2a;
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592 else
2593 gcc_unreachable ();
2595 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597 op1a = copy_addr_to_reg (XEXP (op1, 0));
2598 op2a = copy_addr_to_reg (XEXP (op2, 0));
2599 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2602 /* Make sure not to fold the sincos call again. */
2603 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 call, 3, arg, top1, top2));
2607 else
2609 tree call, fn = NULL_TREE, narg;
2610 tree ctype = build_complex_type (type);
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618 else
2619 gcc_unreachable ();
2621 /* If we don't have a decl for cexp, create one. This is the
2622 friendliest fallback if the user calls __builtin_cexpi
2623 on a target without full C99 function support. */
2624 if (fn == NULL_TREE)
2626 tree fntype;
2627 const char *name = NULL;
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 name = "cexpf";
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 name = "cexp";
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 name = "cexpl";
2636 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 fn = build_fn_decl (name, fntype);
2640 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 build_real (type, dconst0), arg);
2643 /* Make sure not to fold the cexp call again. */
2644 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645 return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 target, VOIDmode, EXPAND_NORMAL);
2649 /* Now build the proper return type. */
2650 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 make_tree (TREE_TYPE (arg), op2),
2652 make_tree (TREE_TYPE (arg), op1)),
2653 target, VOIDmode, EXPAND_NORMAL);
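/* In other words, __builtin_cexpi (x) computes cos(x) + i*sin(x), and
   the expansion above tries, in order: the sincos optab, a libcall to
   sincos, and finally a libcall to cexp on the complex value x*i.  */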
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions. Unlike build_call_expr,
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2664 va_list ap;
2665 tree fntype = TREE_TYPE (fndecl);
2666 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2668 va_start (ap, n);
2669 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670 va_end (ap);
2671 SET_EXPR_LOCATION (fn, loc);
2672 return fn;
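/* Example use, as in the rounding fallbacks below:

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   builds a one-argument CALL_EXPR to FALLBACK_FNDECL without folding
   it back into a builtin.  */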
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676 as an extension (lfloor and lceil). As these are gcc extensions we
2677 do not need to worry about setting errno to EDOM.
2678 If expanding via the optab fails, lower the expression to (int)floor(x).
2679 EXP is the expression that is a call to the builtin function;
2680 if convenient, the result should be placed in TARGET. */
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2685 convert_optab builtin_optab;
2686 rtx op0, tmp;
2687 rtx_insn *insns;
2688 tree fndecl = get_callee_fndecl (exp);
2689 enum built_in_function fallback_fn;
2690 tree fallback_fndecl;
2691 machine_mode mode;
2692 tree arg;
2694 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2695 gcc_unreachable ();
2697 arg = CALL_EXPR_ARG (exp, 0);
2699 switch (DECL_FUNCTION_CODE (fndecl))
2701 CASE_FLT_FN (BUILT_IN_ICEIL):
2702 CASE_FLT_FN (BUILT_IN_LCEIL):
2703 CASE_FLT_FN (BUILT_IN_LLCEIL):
2704 builtin_optab = lceil_optab;
2705 fallback_fn = BUILT_IN_CEIL;
2706 break;
2708 CASE_FLT_FN (BUILT_IN_IFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LFLOOR):
2710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2711 builtin_optab = lfloor_optab;
2712 fallback_fn = BUILT_IN_FLOOR;
2713 break;
2715 default:
2716 gcc_unreachable ();
2719 /* Make a suitable register to place result in. */
2720 mode = TYPE_MODE (TREE_TYPE (exp));
2722 target = gen_reg_rtx (mode);
2724 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725 need to expand the argument again. This way, we will not perform
2726 side-effects more than once. */
2727 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2729 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2731 start_sequence ();
2733 /* Compute into TARGET. */
2734 if (expand_sfix_optab (target, op0, builtin_optab))
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2738 end_sequence ();
2739 emit_insn (insns);
2740 return target;
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns). */
2745 end_sequence ();
2747 /* Fall back to floating point rounding optab. */
2748 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2750 /* For non-C99 targets we may end up without a fallback fndecl here
2751 if the user called __builtin_lfloor directly. In this case emit
2752 a call to the floor/ceil variants nevertheless. This should result
2753 in the best user experience on targets without full C99 support. */
2754 if (fallback_fndecl == NULL_TREE)
2756 tree fntype;
2757 const char *name = NULL;
2759 switch (DECL_FUNCTION_CODE (fndecl))
2761 case BUILT_IN_ICEIL:
2762 case BUILT_IN_LCEIL:
2763 case BUILT_IN_LLCEIL:
2764 name = "ceil";
2765 break;
2766 case BUILT_IN_ICEILF:
2767 case BUILT_IN_LCEILF:
2768 case BUILT_IN_LLCEILF:
2769 name = "ceilf";
2770 break;
2771 case BUILT_IN_ICEILL:
2772 case BUILT_IN_LCEILL:
2773 case BUILT_IN_LLCEILL:
2774 name = "ceill";
2775 break;
2776 case BUILT_IN_IFLOOR:
2777 case BUILT_IN_LFLOOR:
2778 case BUILT_IN_LLFLOOR:
2779 name = "floor";
2780 break;
2781 case BUILT_IN_IFLOORF:
2782 case BUILT_IN_LFLOORF:
2783 case BUILT_IN_LLFLOORF:
2784 name = "floorf";
2785 break;
2786 case BUILT_IN_IFLOORL:
2787 case BUILT_IN_LFLOORL:
2788 case BUILT_IN_LLFLOORL:
2789 name = "floorl";
2790 break;
2791 default:
2792 gcc_unreachable ();
2795 fntype = build_function_type_list (TREE_TYPE (arg),
2796 TREE_TYPE (arg), NULL_TREE);
2797 fallback_fndecl = build_fn_decl (name, fntype);
2800 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2802 tmp = expand_normal (exp);
2803 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2805 /* Truncate the result of the floating-point optab to an integer
2806 via expand_fix (). */
2807 target = gen_reg_rtx (mode);
2808 expand_fix (target, tmp, 0);
2810 return target;
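/* A sketch of the fallback path above: on a target without an insn
   for the lfloor optab,

     long l = __builtin_lfloor (x);

   is expanded as if it were written (long) floor (x), with the floor
   call built by build_call_nofold_loc and the truncation done by
   expand_fix.  */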
2813 /* Expand a call to one of the builtin math functions doing integer
2814 conversion (lrint).
2815 Return 0 if a normal call should be emitted rather than expanding the
2816 function in-line. EXP is the expression that is a call to the builtin
2817 function; if convenient, the result should be placed in TARGET. */
2819 static rtx
2820 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2822 convert_optab builtin_optab;
2823 rtx op0;
2824 rtx_insn *insns;
2825 tree fndecl = get_callee_fndecl (exp);
2826 tree arg;
2827 machine_mode mode;
2828 enum built_in_function fallback_fn = BUILT_IN_NONE;
2830 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2831 gcc_unreachable ();
2833 arg = CALL_EXPR_ARG (exp, 0);
2835 switch (DECL_FUNCTION_CODE (fndecl))
2837 CASE_FLT_FN (BUILT_IN_IRINT):
2838 fallback_fn = BUILT_IN_LRINT;
2839 gcc_fallthrough ();
2840 CASE_FLT_FN (BUILT_IN_LRINT):
2841 CASE_FLT_FN (BUILT_IN_LLRINT):
2842 builtin_optab = lrint_optab;
2843 break;
2845 CASE_FLT_FN (BUILT_IN_IROUND):
2846 fallback_fn = BUILT_IN_LROUND;
2847 gcc_fallthrough ();
2848 CASE_FLT_FN (BUILT_IN_LROUND):
2849 CASE_FLT_FN (BUILT_IN_LLROUND):
2850 builtin_optab = lround_optab;
2851 break;
2853 default:
2854 gcc_unreachable ();
2857 /* There's no easy way to detect the case we need to set EDOM. */
2858 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2859 return NULL_RTX;
2861 /* Make a suitable register to place result in. */
2862 mode = TYPE_MODE (TREE_TYPE (exp));
2864 /* When errno doesn't need to be set, try expanding inline. */
2865 if (!flag_errno_math)
2867 rtx result = gen_reg_rtx (mode);
2869 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 need to expand the argument again. This way, we will not perform
2871 side-effects more than once. */
2872 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2874 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2876 start_sequence ();
2878 if (expand_sfix_optab (result, op0, builtin_optab))
2880 /* Output the entire sequence. */
2881 insns = get_insns ();
2882 end_sequence ();
2883 emit_insn (insns);
2884 return result;
2887 /* If we were unable to expand via the builtin, stop the sequence
2888 (without outputting the insns) and call to the library function
2889 with the stabilized argument list. */
2890 end_sequence ();
2893 if (fallback_fn != BUILT_IN_NONE)
2895 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2896 targets, (int) round (x) should never be transformed into
2897 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 a call to lround in the hope that the target provides at least some
2899 C99 functions. This should result in the best user experience
2900 on targets without full C99 support. */
2901 tree fallback_fndecl = mathfn_built_in_1
2902 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2904 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2905 fallback_fndecl, 1, arg);
2907 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2908 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2909 return convert_to_mode (mode, target, 0);
2912 return expand_call (exp, target, target == const0_rtx);
2915 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2916 a normal call should be emitted rather than expanding the function
2917 in-line. EXP is the expression that is a call to the builtin
2918 function; if convenient, the result should be placed in TARGET. */
2920 static rtx
2921 expand_builtin_powi (tree exp, rtx target)
2923 tree arg0, arg1;
2924 rtx op0, op1;
2925 machine_mode mode;
2926 machine_mode mode2;
2928 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2929 return NULL_RTX;
2931 arg0 = CALL_EXPR_ARG (exp, 0);
2932 arg1 = CALL_EXPR_ARG (exp, 1);
2933 mode = TYPE_MODE (TREE_TYPE (exp));
2935 /* Emit a libcall to libgcc. */
2937 /* Mode of the 2nd argument must match that of an int. */
2938 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2940 if (target == NULL_RTX)
2941 target = gen_reg_rtx (mode);
2943 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2944 if (GET_MODE (op0) != mode)
2945 op0 = convert_to_mode (mode, op0, 0);
2946 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2947 if (GET_MODE (op1) != mode2)
2948 op1 = convert_to_mode (mode2, op1, 0);
2950 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2951 target, LCT_CONST, mode,
2952 op0, mode, op1, mode2);
2954 return target;
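/* E.g. __builtin_powi (x, n) with double X becomes a call to the
   libgcc routine

     double __powidf2 (double, int);

   through the powi optab's libfunc, since targets typically provide
   no insn for it.  */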
2957 /* Expand expression EXP which is a call to the strlen builtin. Return
2958 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2959 try to get the result in TARGET, if convenient. */
2961 static rtx
2962 expand_builtin_strlen (tree exp, rtx target,
2963 machine_mode target_mode)
2965 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2966 return NULL_RTX;
2968 struct expand_operand ops[4];
2969 rtx pat;
2970 tree len;
2971 tree src = CALL_EXPR_ARG (exp, 0);
2972 rtx src_reg;
2973 rtx_insn *before_strlen;
2974 machine_mode insn_mode;
2975 enum insn_code icode = CODE_FOR_nothing;
2976 unsigned int align;
2978 /* If the length can be computed at compile-time, return it. */
2979 len = c_strlen (src, 0);
2980 if (len)
2981 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 /* If the length can be computed at compile-time and is a constant
2984 integer, but there are side-effects in src, evaluate
2985 src for side-effects, then return len.
2986 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987 can be optimized into: i++; x = 3; */
2988 len = c_strlen (src, 1);
2989 if (len && TREE_CODE (len) == INTEGER_CST)
2991 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2995 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2997 /* If SRC is not a pointer type, don't do this operation inline. */
2998 if (align == 0)
2999 return NULL_RTX;
3001 /* Bail out if we can't compute strlen in the right mode. */
3002 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3004 icode = optab_handler (strlen_optab, insn_mode);
3005 if (icode != CODE_FOR_nothing)
3006 break;
3008 if (insn_mode == VOIDmode)
3009 return NULL_RTX;
3011 /* Make a place to hold the source address. We will not expand
3012 the actual source until we are sure that the expansion will
3013 not fail -- there are trees that cannot be expanded twice. */
3014 src_reg = gen_reg_rtx (Pmode);
3016 /* Mark the beginning of the strlen sequence so we can emit the
3017 source operand later. */
3018 before_strlen = get_last_insn ();
3020 create_output_operand (&ops[0], target, insn_mode);
3021 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3022 create_integer_operand (&ops[2], 0);
3023 create_integer_operand (&ops[3], align);
3024 if (!maybe_expand_insn (icode, 4, ops))
3025 return NULL_RTX;
3027 /* Check to see if the argument was declared attribute nonstring
3028 and if so, issue a warning since at this point it's not known
3029 to be nul-terminated. */
3030 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3032 /* Now that we are assured of success, expand the source. */
3033 start_sequence ();
3034 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3035 if (pat != src_reg)
3037 #ifdef POINTERS_EXTEND_UNSIGNED
3038 if (GET_MODE (pat) != Pmode)
3039 pat = convert_to_mode (Pmode, pat,
3040 POINTERS_EXTEND_UNSIGNED);
3041 #endif
3042 emit_move_insn (src_reg, pat);
3044 pat = get_insns ();
3045 end_sequence ();
3047 if (before_strlen)
3048 emit_insn_after (pat, before_strlen);
3049 else
3050 emit_insn_before (pat, get_insns ());
3052 /* Return the value in the proper mode for this function. */
3053 if (GET_MODE (ops[0].value) == target_mode)
3054 target = ops[0].value;
3055 else if (target != 0)
3056 convert_move (target, ops[0].value, 0);
3057 else
3058 target = convert_to_mode (target_mode, ops[0].value, 0);
3060 return target;
3063 /* Expand call EXP to the strnlen built-in, returning the result
3064 in TARGET if convenient, or NULL_RTX on failure. */
3066 static rtx
3067 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3069 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3070 return NULL_RTX;
3072 tree src = CALL_EXPR_ARG (exp, 0);
3073 tree bound = CALL_EXPR_ARG (exp, 1);
3075 if (!bound)
3076 return NULL_RTX;
3078 location_t loc = UNKNOWN_LOCATION;
3079 if (EXPR_HAS_LOCATION (exp))
3080 loc = EXPR_LOCATION (exp);
3082 tree maxobjsize = max_object_size ();
3083 tree func = get_callee_fndecl (exp);
3085 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3086 so these conversions aren't necessary. */
3087 c_strlen_data lendata = { };
3088 tree len = c_strlen (src, 0, &lendata, 1);
3089 if (len)
3090 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3092 if (TREE_CODE (bound) == INTEGER_CST)
3094 if (!TREE_NO_WARNING (exp)
3095 && tree_int_cst_lt (maxobjsize, bound)
3096 && warning_at (loc, OPT_Wstringop_overflow_,
3097 "%K%qD specified bound %E "
3098 "exceeds maximum object size %E",
3099 exp, func, bound, maxobjsize))
3100 TREE_NO_WARNING (exp) = true;
3102 bool exact = true;
3103 if (!len || TREE_CODE (len) != INTEGER_CST)
3105 /* Clear EXACT if LEN may be less than SRC suggests,
3106 such as in
3107 strnlen (&a[i], sizeof a)
3108 where the value of i is unknown. Unless i's value is
3109 zero, the call is unsafe because the bound is greater. */
3110 lendata.decl = unterminated_array (src, &len, &exact);
3111 if (!lendata.decl)
3112 return NULL_RTX;
3115 if (lendata.decl
3116 && !TREE_NO_WARNING (exp)
3117 && ((tree_int_cst_lt (len, bound))
3118 || !exact))
3120 location_t warnloc
3121 = expansion_point_location_if_in_system_header (loc);
3123 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3124 exact
3125 ? G_("%K%qD specified bound %E exceeds the size %E "
3126 "of unterminated array")
3127 : G_("%K%qD specified bound %E may exceed the size "
3128 "of at most %E of unterminated array"),
3129 exp, func, bound, len))
3131 inform (DECL_SOURCE_LOCATION (lendata.decl),
3132 "referenced argument declared here");
3133 TREE_NO_WARNING (exp) = true;
3134 return NULL_RTX;
3138 if (!len)
3139 return NULL_RTX;
3141 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3142 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3145 if (TREE_CODE (bound) != SSA_NAME)
3146 return NULL_RTX;
3148 wide_int min, max;
3149 enum value_range_kind rng = get_range_info (bound, &min, &max);
3150 if (rng != VR_RANGE)
3151 return NULL_RTX;
3153 if (!TREE_NO_WARNING (exp)
3154 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3155 && warning_at (loc, OPT_Wstringop_overflow_,
3156 "%K%qD specified bound [%wu, %wu] "
3157 "exceeds maximum object size %E",
3158 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3159 TREE_NO_WARNING (exp) = true;
3161 bool exact = true;
3162 if (!len || TREE_CODE (len) != INTEGER_CST)
3164 lendata.decl = unterminated_array (src, &len, &exact);
3165 if (!lendata.decl)
3166 return NULL_RTX;
3169 if (lendata.decl
3170 && !TREE_NO_WARNING (exp)
3171 && (wi::ltu_p (wi::to_wide (len), min)
3172 || !exact))
3174 location_t warnloc
3175 = expansion_point_location_if_in_system_header (loc);
3177 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3178 exact
3179 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3180 "the size %E of unterminated array")
3181 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3182 "the size of at most %E of unterminated array"),
3183 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3185 inform (DECL_SOURCE_LOCATION (lendata.decl),
3186 "referenced argument declared here");
3187 TREE_NO_WARNING (exp) = true;
3191 if (lendata.decl)
3192 return NULL_RTX;
3194 if (wi::gtu_p (min, wi::to_wide (len)))
3195 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3197 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3198 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
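/* A sketch of the constant-bound path above:

     n = strnlen ("hello", 3);

   has LEN 5 and BOUND 3, so the expansion emits the equivalent of
   MIN (5, 3), i.e. 3, with no library call.  */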
3201 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3202 bytes from the constant string DATA + OFFSET and return it as a target
3203 constant. */
3205 static rtx
3206 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3207 scalar_int_mode mode)
3209 const char *str = (const char *) data;
3211 gcc_assert (offset >= 0
3212 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3213 <= strlen (str) + 1));
3215 return c_readstr (str + offset, mode);
3218 /* LEN specifies the length of the block for a memcpy/memset operation.
3219 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3220 In some cases we can make a very likely guess about the maximum size,
3221 which we then put into PROBABLE_MAX_SIZE. */
3223 static void
3224 determine_block_size (tree len, rtx len_rtx,
3225 unsigned HOST_WIDE_INT *min_size,
3226 unsigned HOST_WIDE_INT *max_size,
3227 unsigned HOST_WIDE_INT *probable_max_size)
3229 if (CONST_INT_P (len_rtx))
3231 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3232 return;
3234 else
3236 wide_int min, max;
3237 enum value_range_kind range_type = VR_UNDEFINED;
3239 /* Determine bounds from the type. */
3240 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3241 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3242 else
3243 *min_size = 0;
3244 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3245 *probable_max_size = *max_size
3246 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3247 else
3248 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3250 if (TREE_CODE (len) == SSA_NAME)
3251 range_type = get_range_info (len, &min, &max);
3252 if (range_type == VR_RANGE)
3254 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3255 *min_size = min.to_uhwi ();
3256 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3257 *probable_max_size = *max_size = max.to_uhwi ();
3259 else if (range_type == VR_ANTI_RANGE)
3261 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3262 if (min == 0)
3264 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3265 *min_size = max.to_uhwi () + 1;
3267 /* Code like
3269 int n;
3270 if (n < 100)
3271 memcpy (a, b, n)
3273 produces an anti-range allowing negative values of N. We can
3274 still use the information and make a guess that N is not negative.
3275 */
3276 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3277 *probable_max_size = min.to_uhwi () - 1;
3280 gcc_checking_assert (*max_size <=
3281 (unsigned HOST_WIDE_INT)
3282 GET_MODE_MASK (GET_MODE (len_rtx)));
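/* A hedged example: given

     void f (unsigned n) { if (n >= 5 && n <= 32) memcpy (a, b, n); }

   earlier range analysis records N's range as [5, 32], so the code
   above would set *MIN_SIZE to 5 and *MAX_SIZE = *PROBABLE_MAX_SIZE
   to 32, assuming the range information survives to expansion.  */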
3285 /* Try to verify that the sizes and lengths of the arguments to a string
3286 manipulation function given by EXP are within valid bounds and that
3287 the operation does not lead to buffer overflow or read past the end.
3288 Arguments other than EXP may be null. When non-null, the arguments
3289 have the following meaning:
3290 DST is the destination of a copy call or NULL otherwise.
3291 SRC is the source of a copy call or NULL otherwise.
3292 DSTWRITE is the number of bytes written into the destination obtained
3293 from the user-supplied size argument to the function (such as in
3294 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3295 MAXREAD is the user-supplied bound on the length of the source sequence
3296 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3297 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3298 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3299 expression EXP is a string function call (as opposed to a memory call
3300 like memcpy). As an exception, SRCSTR can also be an integer denoting
3301 the precomputed size of the source string or object (for functions like
3302 memcpy).
3303 DSTSIZE is the size of the destination object specified by the last
3304 argument to the _chk builtins, typically resulting from the expansion
3305 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3306 DSTSIZE).
3308 When DSTWRITE is null, the source length is checked to verify that
3309 it doesn't exceed SIZE_MAX.
3311 If the call is successfully verified as safe return true, otherwise
3312 return false. */
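/* A hedged example of a diagnosed overflow:

     char d[3];
     strcpy (d, "abcd");

   Here the source length range is [4, 4], so counting the terminating
   nul five bytes are written into a DSTSIZE of 3; the "writing 5 bytes
   into a region of size 3" warning is issued and false is returned.  */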
3314 static bool
3315 check_access (tree exp, tree, tree, tree dstwrite,
3316 tree maxread, tree srcstr, tree dstsize)
3318 int opt = OPT_Wstringop_overflow_;
3320 /* The size of the largest object is half the address space, or
3321 PTRDIFF_MAX. (This is way too permissive.) */
3322 tree maxobjsize = max_object_size ();
3324 /* Either the length of the source string for string functions or
3325 the size of the source object for raw memory functions. */
3326 tree slen = NULL_TREE;
3328 tree range[2] = { NULL_TREE, NULL_TREE };
3330 /* Set to true when the exact number of bytes written by a string
3331 function like strcpy is not known and the only thing that is
3332 known is that it must be at least one (for the terminating nul). */
3333 bool at_least_one = false;
3334 if (srcstr)
3336 /* SRCSTR is normally a pointer to string but as a special case
3337 it can be an integer denoting the length of a string. */
3338 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3340 /* Try to determine the range of lengths the source string
3341 refers to. If it can be determined and is less than
3342 the upper bound given by MAXREAD add one to it for
3343 the terminating nul. Otherwise, set it to one for
3344 the same reason, or to MAXREAD as appropriate. */
3345 c_strlen_data lendata = { };
3346 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3347 range[0] = lendata.minlen;
3348 range[1] = lendata.maxbound;
3349 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3351 if (maxread && tree_int_cst_le (maxread, range[0]))
3352 range[0] = range[1] = maxread;
3353 else
3354 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3355 range[0], size_one_node);
3357 if (maxread && tree_int_cst_le (maxread, range[1]))
3358 range[1] = maxread;
3359 else if (!integer_all_onesp (range[1]))
3360 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3361 range[1], size_one_node);
3363 slen = range[0];
3365 else
3367 at_least_one = true;
3368 slen = size_one_node;
3371 else
3372 slen = srcstr;
3375 if (!dstwrite && !maxread)
3377 /* When the only available piece of data is the object size
3378 there is nothing to do. */
3379 if (!slen)
3380 return true;
3382 /* Otherwise, when the length of the source sequence is known
3383 (as with strlen), set DSTWRITE to it. */
3384 if (!range[0])
3385 dstwrite = slen;
3388 if (!dstsize)
3389 dstsize = maxobjsize;
3391 if (dstwrite)
3392 get_size_range (dstwrite, range);
3394 tree func = get_callee_fndecl (exp);
3396 /* First check the number of bytes to be written against the maximum
3397 object size. */
3398 if (range[0]
3399 && TREE_CODE (range[0]) == INTEGER_CST
3400 && tree_int_cst_lt (maxobjsize, range[0]))
3402 if (TREE_NO_WARNING (exp))
3403 return false;
3405 location_t loc = tree_nonartificial_location (exp);
3406 loc = expansion_point_location_if_in_system_header (loc);
3408 bool warned;
3409 if (range[0] == range[1])
3410 warned = warning_at (loc, opt,
3411 "%K%qD specified size %E "
3412 "exceeds maximum object size %E",
3413 exp, func, range[0], maxobjsize);
3414 else
3415 warned = warning_at (loc, opt,
3416 "%K%qD specified size between %E and %E "
3417 "exceeds maximum object size %E",
3418 exp, func,
3419 range[0], range[1], maxobjsize);
3420 if (warned)
3421 TREE_NO_WARNING (exp) = true;
3423 return false;
3426 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3427 constant, and in range of unsigned HOST_WIDE_INT. */
3428 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3430 /* Next check the number of bytes to be written against the destination
3431 object size. */
3432 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3434 if (range[0]
3435 && TREE_CODE (range[0]) == INTEGER_CST
3436 && ((tree_fits_uhwi_p (dstsize)
3437 && tree_int_cst_lt (dstsize, range[0]))
3438 || (dstwrite
3439 && tree_fits_uhwi_p (dstwrite)
3440 && tree_int_cst_lt (dstwrite, range[0]))))
3442 if (TREE_NO_WARNING (exp))
3443 return false;
3445 location_t loc = tree_nonartificial_location (exp);
3446 loc = expansion_point_location_if_in_system_header (loc);
3448 if (dstwrite == slen && at_least_one)
3450 /* This is a call to strcpy with a destination of 0 size
3451 and a source of unknown length. The call will write
3452 at least one byte past the end of the destination. */
3453 warning_at (loc, opt,
3454 "%K%qD writing %E or more bytes into a region "
3455 "of size %E overflows the destination",
3456 exp, func, range[0], dstsize);
3458 else if (tree_int_cst_equal (range[0], range[1]))
3459 warning_n (loc, opt, tree_to_uhwi (range[0]),
3460 "%K%qD writing %E byte into a region "
3461 "of size %E overflows the destination",
3462 "%K%qD writing %E bytes into a region "
3463 "of size %E overflows the destination",
3464 exp, func, range[0], dstsize);
3465 else if (tree_int_cst_sign_bit (range[1]))
3467 /* Avoid printing the upper bound if it's invalid. */
3468 warning_at (loc, opt,
3469 "%K%qD writing %E or more bytes into a region "
3470 "of size %E overflows the destination",
3471 exp, func, range[0], dstsize);
3473 else
3474 warning_at (loc, opt,
3475 "%K%qD writing between %E and %E bytes into "
3476 "a region of size %E overflows the destination",
3477 exp, func, range[0], range[1],
3478 dstsize);
3480 /* Return error when an overflow has been detected. */
3481 return false;
3485 /* Check the maximum length of the source sequence against the size
3486 of the destination object if known, or against the maximum size
3487 of an object. */
3488 if (maxread)
3490 get_size_range (maxread, range);
3492 /* Use the lower end for MAXREAD from now on. */
3493 if (range[0])
3494 maxread = range[0];
3496 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3498 location_t loc = tree_nonartificial_location (exp);
3499 loc = expansion_point_location_if_in_system_header (loc);
3501 if (tree_int_cst_lt (maxobjsize, range[0]))
3503 if (TREE_NO_WARNING (exp))
3504 return false;
3506 /* Warn about crazy big sizes first since that's more
3507 likely to be meaningful than saying that the bound
3508 is greater than the object size if both are big. */
3509 if (range[0] == range[1])
3510 warning_at (loc, opt,
3511 "%K%qD specified bound %E "
3512 "exceeds maximum object size %E",
3513 exp, func,
3514 range[0], maxobjsize);
3515 else
3516 warning_at (loc, opt,
3517 "%K%qD specified bound between %E and %E "
3518 "exceeds maximum object size %E",
3519 exp, func,
3520 range[0], range[1], maxobjsize);
3522 return false;
3525 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3527 if (TREE_NO_WARNING (exp))
3528 return false;
3530 if (tree_int_cst_equal (range[0], range[1]))
3531 warning_at (loc, opt,
3532 "%K%qD specified bound %E "
3533 "exceeds destination size %E",
3534 exp, func,
3535 range[0], dstsize);
3536 else
3537 warning_at (loc, opt,
3538 "%K%qD specified bound between %E and %E "
3539 "exceeds destination size %E",
3540 exp, func,
3541 range[0], range[1], dstsize);
3542 return false;
3547 /* Check for reading past the end of SRC. */
3548 if (slen
3549 && slen == srcstr
3550 && dstwrite && range[0]
3551 && tree_int_cst_lt (slen, range[0]))
3553 if (TREE_NO_WARNING (exp))
3554 return false;
3556 location_t loc = tree_nonartificial_location (exp);
3558 if (tree_int_cst_equal (range[0], range[1]))
3559 warning_n (loc, opt, tree_to_uhwi (range[0]),
3560 "%K%qD reading %E byte from a region of size %E",
3561 "%K%qD reading %E bytes from a region of size %E",
3562 exp, func, range[0], slen);
3563 else if (tree_int_cst_sign_bit (range[1]))
3565 /* Avoid printing the upper bound if it's invalid. */
3566 warning_at (loc, opt,
3567 "%K%qD reading %E or more bytes from a region "
3568 "of size %E",
3569 exp, func, range[0], slen);
3571 else
3572 warning_at (loc, opt,
3573 "%K%qD reading between %E and %E bytes from a region "
3574 "of size %E",
3575 exp, func, range[0], range[1], slen);
3576 return false;
3579 return true;
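
/* Illustrative example, not part of this file: the checks above are what
   drive -Wstringop-overflow diagnostics for user code such as

     #include <string.h>

     char d[3];

     void f (void)
     {
       strcpy (d, "abcd");   // writes 5 bytes (4 chars + nul) into d[3]
     }

   Here the computed write range is [5, 5] and DSTSIZE is 3, so the
   "writing 5 bytes into a region of size 3" warning above is issued.  */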
3582 /* Helper to compute the size of the object referenced by the DEST
3583 expression which must have pointer type, using Object Size type
3584 OSTYPE (only the least significant 2 bits are used). Return
3585 an estimate of the size of the object if successful or NULL when
3586 the size cannot be determined. When the referenced object involves
3587 a non-constant offset in some range the returned value represents
3588 the largest size given the smallest non-negative offset in the
3589 range. The function is intended for diagnostics and should not
3590 be used to influence code generation or optimization. */
3592 tree
3593 compute_objsize (tree dest, int ostype)
3595 unsigned HOST_WIDE_INT size;
3597 /* Only the two least significant bits are meaningful. */
3598 ostype &= 3;
3600 if (compute_builtin_object_size (dest, ostype, &size))
3601 return build_int_cst (sizetype, size);
3603 if (TREE_CODE (dest) == SSA_NAME)
3605 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3606 if (!is_gimple_assign (stmt))
3607 return NULL_TREE;
3609 dest = gimple_assign_rhs1 (stmt);
3611 tree_code code = gimple_assign_rhs_code (stmt);
3612 if (code == POINTER_PLUS_EXPR)
3614 /* compute_builtin_object_size fails for addresses with
3615 non-constant offsets. Try to determine the range of
3616 such an offset here and use it to adjust the constant
3617 size. */
3618 tree off = gimple_assign_rhs2 (stmt);
3619 if (TREE_CODE (off) == INTEGER_CST)
3621 if (tree size = compute_objsize (dest, ostype))
3623 wide_int wioff = wi::to_wide (off);
3624 wide_int wisiz = wi::to_wide (size);
3626 /* Ignore negative offsets for now. For others,
3627 use the lower bound as the most optimistic
3628 estimate of the (remaining) size. */
3629 if (wi::sign_mask (wioff))
3631 else if (wi::ltu_p (wioff, wisiz))
3632 return wide_int_to_tree (TREE_TYPE (size),
3633 wi::sub (wisiz, wioff));
3634 else
3635 return size_zero_node;
3638 else if (TREE_CODE (off) == SSA_NAME
3639 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3641 wide_int min, max;
3642 enum value_range_kind rng = get_range_info (off, &min, &max);
3644 if (rng == VR_RANGE)
3646 if (tree size = compute_objsize (dest, ostype))
3648 wide_int wisiz = wi::to_wide (size);
3650 /* Ignore negative offsets for now. For others,
3651 use the lower bound as the most optimistic
3652 estimate of the (remaining) size. */
3653 if (wi::sign_mask (min))
3655 else if (wi::ltu_p (min, wisiz))
3656 return wide_int_to_tree (TREE_TYPE (size),
3657 wi::sub (wisiz, min));
3658 else
3659 return size_zero_node;
3664 else if (code != ADDR_EXPR)
3665 return NULL_TREE;
3668 /* Unless computing the largest size (for memcpy and other raw memory
3669 functions), try to determine the size of the object from its type. */
3670 if (!ostype)
3671 return NULL_TREE;
3673 if (TREE_CODE (dest) != ADDR_EXPR)
3674 return NULL_TREE;
3676 tree type = TREE_TYPE (dest);
3677 if (TREE_CODE (type) == POINTER_TYPE)
3678 type = TREE_TYPE (type);
3680 type = TYPE_MAIN_VARIANT (type);
3682 if (TREE_CODE (type) == ARRAY_TYPE
3683 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3685 /* Return the constant size unless it's zero (that's a zero-length
3686 array likely at the end of a struct). */
3687 tree size = TYPE_SIZE_UNIT (type);
3688 if (size && TREE_CODE (size) == INTEGER_CST
3689 && !integer_zerop (size))
3690 return size;
3693 return NULL_TREE;
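
/* For illustration only, not part of this file: the sizes computed by
   compute_objsize correspond to what user code can observe through
   __builtin_object_size.  A minimal sketch, assuming the compiler can
   see the whole object:

     #include <stddef.h>

     static char a[8];

     size_t remaining (void)
     {
       char *p = a + 3;
       // Type-0 object size: bytes from P to the end of the object,
       // here 8 - 3 == 5 (or (size_t)-1 if the size is unknown).
       return __builtin_object_size (p, 0);
     }
*/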
3696 /* Helper to determine and check the sizes of the source and the destination
3697 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3698 call expression, DEST is the destination argument, SRC is the source
3699 argument or null, and LEN is the number of bytes. Use Object Size type-0
3700 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3701 (no overflow or invalid sizes), false otherwise. */
3703 static bool
3704 check_memop_access (tree exp, tree dest, tree src, tree size)
3706 /* For functions like memset and memcpy that operate on raw memory
3707 try to determine the size of the largest source and destination
3708 object using type-0 Object Size regardless of the object size
3709 type specified by the option. */
3710 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3711 tree dstsize = compute_objsize (dest, 0);
3713 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3714 srcsize, dstsize);
3717 /* Validate memchr arguments without performing any expansion.
3718 Return NULL_RTX. */
3720 static rtx
3721 expand_builtin_memchr (tree exp, rtx)
3723 if (!validate_arglist (exp,
3724 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3725 return NULL_RTX;
3727 tree arg1 = CALL_EXPR_ARG (exp, 0);
3728 tree len = CALL_EXPR_ARG (exp, 2);
3730 /* Diagnose calls where the specified length exceeds the size
3731 of the object. */
3732 if (warn_stringop_overflow)
3734 tree size = compute_objsize (arg1, 0);
3735 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3736 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3739 return NULL_RTX;
3742 /* Expand a call EXP to the memcpy builtin.
3743 Return NULL_RTX if we failed; the caller should emit a normal call,
3744 otherwise try to get the result in TARGET, if convenient (and in
3745 mode MODE if that's convenient). */
3747 static rtx
3748 expand_builtin_memcpy (tree exp, rtx target)
3750 if (!validate_arglist (exp,
3751 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3752 return NULL_RTX;
3754 tree dest = CALL_EXPR_ARG (exp, 0);
3755 tree src = CALL_EXPR_ARG (exp, 1);
3756 tree len = CALL_EXPR_ARG (exp, 2);
3758 check_memop_access (exp, dest, src, len);
3760 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3761 /*retmode=*/ RETURN_BEGIN);
3764 /* Check a call EXP to the memmove built-in for validity.
3765 Return NULL_RTX on both success and failure. */
3767 static rtx
3768 expand_builtin_memmove (tree exp, rtx)
3770 if (!validate_arglist (exp,
3771 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3772 return NULL_RTX;
3774 tree dest = CALL_EXPR_ARG (exp, 0);
3775 tree src = CALL_EXPR_ARG (exp, 1);
3776 tree len = CALL_EXPR_ARG (exp, 2);
3778 check_memop_access (exp, dest, src, len);
3780 return NULL_RTX;
3783 /* Expand a call EXP to the mempcpy builtin.
3784 Return NULL_RTX if we failed; the caller should emit a normal call,
3785 otherwise try to get the result in TARGET, if convenient (and in
3786 mode MODE if that's convenient). */
3788 static rtx
3789 expand_builtin_mempcpy (tree exp, rtx target)
3791 if (!validate_arglist (exp,
3792 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3793 return NULL_RTX;
3795 tree dest = CALL_EXPR_ARG (exp, 0);
3796 tree src = CALL_EXPR_ARG (exp, 1);
3797 tree len = CALL_EXPR_ARG (exp, 2);
3799 /* Policy does not generally allow using compute_objsize (which
3800 is used internally by check_memop_access) to change code generation
3801 or drive optimization decisions.
3803 In this instance it is safe because the code we generate has
3804 the same semantics regardless of the return value of
3805 check_memop_access. Exactly the same amount of data is copied
3806 and the return value is exactly the same in both cases.
3808 Furthermore, check_memop_access always uses mode 0 for the call to
3809 compute_objsize, so the imprecise nature of compute_objsize is
3810 avoided. */
3812 /* Avoid expanding mempcpy into memcpy when the call is determined
3813 to overflow the buffer. This also prevents the same overflow
3814 from being diagnosed again when expanding memcpy. */
3815 if (!check_memop_access (exp, dest, src, len))
3816 return NULL_RTX;
3818 return expand_builtin_mempcpy_args (dest, src, len,
3819 target, exp, /*retmode=*/ RETURN_END);
3822 /* Helper function to do the actual work for expanding the memory copy
3823 family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3824 LEN bytes of memory from SRC to DEST and assigns the result to TARGET
3825 if convenient. The return value is based on the RETMODE argument. */
3827 static rtx
3828 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3829 rtx target, tree exp, memop_ret retmode)
3831 const char *src_str;
3832 unsigned int src_align = get_pointer_alignment (src);
3833 unsigned int dest_align = get_pointer_alignment (dest);
3834 rtx dest_mem, src_mem, dest_addr, len_rtx;
3835 HOST_WIDE_INT expected_size = -1;
3836 unsigned int expected_align = 0;
3837 unsigned HOST_WIDE_INT min_size;
3838 unsigned HOST_WIDE_INT max_size;
3839 unsigned HOST_WIDE_INT probable_max_size;
3841 /* If DEST is not a pointer type, call the normal function. */
3842 if (dest_align == 0)
3843 return NULL_RTX;
3845 /* If either SRC is not a pointer type, don't do this
3846 operation in-line. */
3847 if (src_align == 0)
3848 return NULL_RTX;
3850 if (currently_expanding_gimple_stmt)
3851 stringop_block_profile (currently_expanding_gimple_stmt,
3852 &expected_align, &expected_size);
3854 if (expected_align < dest_align)
3855 expected_align = dest_align;
3856 dest_mem = get_memory_rtx (dest, len);
3857 set_mem_align (dest_mem, dest_align);
3858 len_rtx = expand_normal (len);
3859 determine_block_size (len, len_rtx, &min_size, &max_size,
3860 &probable_max_size);
3861 src_str = c_getstr (src);
3863 /* If SRC is a string constant and block move would be done
3864 by pieces, we can avoid loading the string from memory
3865 and only store the computed constants. */
3866 if (src_str
3867 && CONST_INT_P (len_rtx)
3868 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3869 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3870 CONST_CAST (char *, src_str),
3871 dest_align, false))
3873 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3874 builtin_memcpy_read_str,
3875 CONST_CAST (char *, src_str),
3876 dest_align, false, retmode);
3877 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3878 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3879 return dest_mem;
3882 src_mem = get_memory_rtx (src, len);
3883 set_mem_align (src_mem, src_align);
3885 /* Copy word part most expediently. */
3886 enum block_op_methods method = BLOCK_OP_NORMAL;
3887 if (CALL_EXPR_TAILCALL (exp)
3888 && (retmode == RETURN_BEGIN || target == const0_rtx))
3889 method = BLOCK_OP_TAILCALL;
3890 if (retmode == RETURN_END && target != const0_rtx)
3891 method = BLOCK_OP_NO_LIBCALL_RET;
3892 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3893 expected_align, expected_size,
3894 min_size, max_size, probable_max_size);
3895 if (dest_addr == pc_rtx)
3896 return NULL_RTX;
3898 if (dest_addr == 0)
3900 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3901 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3904 if (retmode != RETURN_BEGIN && target != const0_rtx)
3906 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3907 /* stpcpy returns a pointer to the last byte written, i.e. the nul. */
3908 if (retmode == RETURN_END_MINUS_ONE)
3909 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3912 return dest_addr;
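
/* A user-level sketch, not part of this file, of the three RETMODE
   variants handled above (mempcpy and stpcpy are GNU/POSIX extensions):

     #define _GNU_SOURCE
     #include <string.h>

     void f (char *d, const char *s, size_t n)
     {
       char *r1 = memcpy (d, s, n);    // RETURN_BEGIN: d
       char *r2 = mempcpy (d, s, n);   // RETURN_END: d + n
       char *r3 = stpcpy (d, s);       // RETURN_END_MINUS_ONE: d + strlen (s),
                                       // i.e. one before the end of the copy
       (void) r1; (void) r2; (void) r3;
     }
*/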
3915 static rtx
3916 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3917 rtx target, tree orig_exp, memop_ret retmode)
3919 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3920 retmode);
3923 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3924 we failed; the caller should emit a normal call, otherwise try to
3925 get the result in TARGET, if convenient.
3926 Return value is based on RETMODE argument. */
3928 static rtx
3929 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3931 struct expand_operand ops[3];
3932 rtx dest_mem;
3933 rtx src_mem;
3935 if (!targetm.have_movstr ())
3936 return NULL_RTX;
3938 dest_mem = get_memory_rtx (dest, NULL);
3939 src_mem = get_memory_rtx (src, NULL);
3940 if (retmode == RETURN_BEGIN)
3942 target = force_reg (Pmode, XEXP (dest_mem, 0));
3943 dest_mem = replace_equiv_address (dest_mem, target);
3946 create_output_operand (&ops[0],
3947 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3948 create_fixed_operand (&ops[1], dest_mem);
3949 create_fixed_operand (&ops[2], src_mem);
3950 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3951 return NULL_RTX;
3953 if (retmode != RETURN_BEGIN && target != const0_rtx)
3955 target = ops[0].value;
3956 /* movstr is supposed to set end to the address of the NUL
3957 terminator. If the caller requested a mempcpy-like return value,
3958 adjust it. */
3959 if (retmode == RETURN_END)
3961 rtx tem = plus_constant (GET_MODE (target),
3962 gen_lowpart (GET_MODE (target), target), 1);
3963 emit_move_insn (target, force_operand (tem, NULL_RTX));
3966 return target;
3969 /* Do some very basic size validation of a call to the strcat builtin
3970 given by EXP. Return NULL_RTX to have the built-in expand to a call
3971 to the library function. */
3973 static rtx
3974 expand_builtin_strcat (tree exp, rtx)
3976 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3977 || !warn_stringop_overflow)
3978 return NULL_RTX;
3980 tree dest = CALL_EXPR_ARG (exp, 0);
3981 tree src = CALL_EXPR_ARG (exp, 1);
3983 /* There is no way here to determine the length of the string in
3984 the destination to which the SRC string is being appended, so
3985 just diagnose cases when the source string is longer than
3986 the destination object. */
3988 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3990 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3991 destsize);
3993 return NULL_RTX;
3996 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3997 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3998 try to get the result in TARGET, if convenient (and in mode MODE if that's
3999 convenient). */
4001 static rtx
4002 expand_builtin_strcpy (tree exp, rtx target)
4004 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4005 return NULL_RTX;
4007 tree dest = CALL_EXPR_ARG (exp, 0);
4008 tree src = CALL_EXPR_ARG (exp, 1);
4010 if (warn_stringop_overflow)
4012 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4013 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4014 src, destsize);
4017 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4019 /* Check to see if the argument was declared attribute nonstring
4020 and if so, issue a warning since at this point it's not known
4021 to be nul-terminated. */
4022 tree fndecl = get_callee_fndecl (exp);
4023 maybe_warn_nonstring_arg (fndecl, exp);
4024 return ret;
4027 return NULL_RTX;
4030 /* Helper function to do the actual work for expand_builtin_strcpy. The
4031 arguments to the builtin_strcpy call DEST and SRC are broken out
4032 so that this can also be called without constructing an actual CALL_EXPR.
4033 The other arguments and return value are the same as for
4034 expand_builtin_strcpy. */
4036 static rtx
4037 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4039 /* Detect strcpy calls with unterminated arrays. */
4040 if (tree nonstr = unterminated_array (src))
4042 /* NONSTR refers to the non-nul terminated constant array. */
4043 if (!TREE_NO_WARNING (exp))
4044 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4045 return NULL_RTX;
4048 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4051 /* Expand a call EXP to the stpcpy builtin.
4052 Return NULL_RTX if we failed; the caller should emit a normal call,
4053 otherwise try to get the result in TARGET, if convenient (and in
4054 mode MODE if that's convenient). */
4056 static rtx
4057 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4059 tree dst, src;
4060 location_t loc = EXPR_LOCATION (exp);
4062 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4063 return NULL_RTX;
4065 dst = CALL_EXPR_ARG (exp, 0);
4066 src = CALL_EXPR_ARG (exp, 1);
4068 if (warn_stringop_overflow)
4070 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4071 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4072 src, destsize);
4075 /* If return value is ignored, transform stpcpy into strcpy. */
4076 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4078 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4079 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4080 return expand_expr (result, target, mode, EXPAND_NORMAL);
4082 else
4084 tree len, lenp1;
4085 rtx ret;
4087 /* Ensure we get an actual string whose length can be evaluated at
4088 compile-time, not an expression containing a string. This is
4089 because the latter will potentially produce pessimized code
4090 when used to produce the return value. */
4091 c_strlen_data lendata = { };
4092 if (!c_getstr (src, NULL)
4093 || !(len = c_strlen (src, 0, &lendata, 1)))
4094 return expand_movstr (dst, src, target,
4095 /*retmode=*/ RETURN_END_MINUS_ONE);
4097 if (lendata.decl && !TREE_NO_WARNING (exp))
4098 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4100 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4101 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4102 target, exp,
4103 /*retmode=*/ RETURN_END_MINUS_ONE);
4105 if (ret)
4106 return ret;
4108 if (TREE_CODE (len) == INTEGER_CST)
4110 rtx len_rtx = expand_normal (len);
4112 if (CONST_INT_P (len_rtx))
4114 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4116 if (ret)
4118 if (! target)
4120 if (mode != VOIDmode)
4121 target = gen_reg_rtx (mode);
4122 else
4123 target = gen_reg_rtx (GET_MODE (ret));
4125 if (GET_MODE (target) != GET_MODE (ret))
4126 ret = gen_lowpart (GET_MODE (target), ret);
4128 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4129 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4130 gcc_assert (ret);
4132 return target;
4137 return expand_movstr (dst, src, target,
4138 /*retmode=*/ RETURN_END_MINUS_ONE);
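
/* A sketch, not part of this file, of the transformation above when the
   source length is known at compile time: the stpcpy result is formed
   from a mempcpy of LEN + 1 bytes, minus one:

     #define _GNU_SOURCE
     #include <string.h>

     char *f (char *d)
     {
       // stpcpy (d, "abc") is expanded as:
       return (char *) mempcpy (d, "abc", 4) - 1;   // points at the nul
     }
*/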
4142 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4143 arguments while being careful to avoid duplicate warnings (which could
4144 be issued if the expander were to expand the call, resulting in it
4145 being emitted in expand_call()). */
4147 static rtx
4148 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4150 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4152 /* The call has been successfully expanded. Check for nonstring
4153 arguments and issue warnings as appropriate. */
4154 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4155 return ret;
4158 return NULL_RTX;
4161 /* Check a call EXP to the stpncpy built-in for validity.
4162 Return NULL_RTX on both success and failure. */
4164 static rtx
4165 expand_builtin_stpncpy (tree exp, rtx)
4167 if (!validate_arglist (exp,
4168 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4169 || !warn_stringop_overflow)
4170 return NULL_RTX;
4172 /* The source and destination of the call. */
4173 tree dest = CALL_EXPR_ARG (exp, 0);
4174 tree src = CALL_EXPR_ARG (exp, 1);
4176 /* The exact number of bytes to write (not the maximum). */
4177 tree len = CALL_EXPR_ARG (exp, 2);
4179 /* The size of the destination object. */
4180 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4182 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4184 return NULL_RTX;
4187 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4188 bytes from constant string DATA + OFFSET and return it as target
4189 constant. */
4191 static rtx
4192 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4193 scalar_int_mode mode)
4195 const char *str = (const char *) data;
4197 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4198 return const0_rtx;
4200 return c_readstr (str + offset, mode);
4203 /* Helper to check the sizes of sequences and the destination of calls
4204 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4205 success (no overflow or invalid sizes), false otherwise. */
4207 static bool
4208 check_strncat_sizes (tree exp, tree objsize)
4210 tree dest = CALL_EXPR_ARG (exp, 0);
4211 tree src = CALL_EXPR_ARG (exp, 1);
4212 tree maxread = CALL_EXPR_ARG (exp, 2);
4214 /* Try to determine the range of lengths that the source expression
4215 refers to. */
4216 c_strlen_data lendata = { };
4217 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4219 /* Try to verify that the destination is big enough for the shortest
4220 string. */
4222 if (!objsize && warn_stringop_overflow)
4224 /* If it hasn't been provided by __strncat_chk, try to determine
4225 the size of the destination object into which the source is
4226 being copied. */
4227 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4230 /* Add one for the terminating nul. */
4231 tree srclen = (lendata.minlen
4232 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4233 size_one_node)
4234 : NULL_TREE);
4236 /* The strncat function copies at most MAXREAD bytes and always appends
4237 the terminating nul, so the specified upper bound should never be equal
4238 to (or greater than) the size of the destination. */
4239 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4240 && tree_int_cst_equal (objsize, maxread))
4242 location_t loc = tree_nonartificial_location (exp);
4243 loc = expansion_point_location_if_in_system_header (loc);
4245 warning_at (loc, OPT_Wstringop_overflow_,
4246 "%K%qD specified bound %E equals destination size",
4247 exp, get_callee_fndecl (exp), maxread);
4249 return false;
4252 if (!srclen
4253 || (maxread && tree_fits_uhwi_p (maxread)
4254 && tree_fits_uhwi_p (srclen)
4255 && tree_int_cst_lt (maxread, srclen)))
4256 srclen = maxread;
4258 /* The number of bytes to write is LEN but check_access will also
4259 check SRCLEN if LEN's value isn't known. */
4260 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4261 objsize);
4264 /* Similar to expand_builtin_strcat, do some very basic size validation
4265 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4266 the built-in expand to a call to the library function. */
4268 static rtx
4269 expand_builtin_strncat (tree exp, rtx)
4271 if (!validate_arglist (exp,
4272 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4273 || !warn_stringop_overflow)
4274 return NULL_RTX;
4276 tree dest = CALL_EXPR_ARG (exp, 0);
4277 tree src = CALL_EXPR_ARG (exp, 1);
4278 /* The upper bound on the number of bytes to write. */
4279 tree maxread = CALL_EXPR_ARG (exp, 2);
4280 /* The length of the source sequence. */
4281 tree slen = c_strlen (src, 1);
4283 /* Try to determine the range of lengths that the source expression
4284 refers to. Since the lengths are only used for warning and not
4285 for code generation, disable strict mode below. */
4286 tree maxlen = slen;
4287 if (!maxlen)
4289 c_strlen_data lendata = { };
4290 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4291 maxlen = lendata.maxbound;
4294 /* Try to verify that the destination is big enough for the shortest
4295 string. First try to determine the size of the destination object
4296 into which the source is being copied. */
4297 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4299 /* Add one for the terminating nul. */
4300 tree srclen = (maxlen
4301 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4302 size_one_node)
4303 : NULL_TREE);
4305 /* The strncat function copies at most MAXREAD bytes and always appends
4306 the terminating nul, so the specified upper bound should never be equal
4307 to (or greater than) the size of the destination. */
4308 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4309 && tree_int_cst_equal (destsize, maxread))
4311 location_t loc = tree_nonartificial_location (exp);
4312 loc = expansion_point_location_if_in_system_header (loc);
4314 warning_at (loc, OPT_Wstringop_overflow_,
4315 "%K%qD specified bound %E equals destination size",
4316 exp, get_callee_fndecl (exp), maxread);
4318 return NULL_RTX;
4321 if (!srclen
4322 || (maxread && tree_fits_uhwi_p (maxread)
4323 && tree_fits_uhwi_p (srclen)
4324 && tree_int_cst_lt (maxread, srclen)))
4325 srclen = maxread;
4327 /* The number of bytes to write is SRCLEN. */
4328 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4330 return NULL_RTX;
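
/* Illustrative example, not part of this file, of the bound check above
   under -Wstringop-overflow:

     #include <string.h>

     char d[8];

     void f (const char *s)
     {
       // Bound equals the destination size: strncat appends up to 8
       // bytes plus the terminating nul, so it can write 9 bytes.
       strncat (d, s, sizeof d);
     }

   A safe bound must account for the existing contents and the nul,
   e.g. sizeof d - strlen (d) - 1.  */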
4333 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4334 NULL_RTX if we failed; the caller should emit a normal call. */
4336 static rtx
4337 expand_builtin_strncpy (tree exp, rtx target)
4339 location_t loc = EXPR_LOCATION (exp);
4341 if (validate_arglist (exp,
4342 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4344 tree dest = CALL_EXPR_ARG (exp, 0);
4345 tree src = CALL_EXPR_ARG (exp, 1);
4346 /* The number of bytes to write (not the maximum). */
4347 tree len = CALL_EXPR_ARG (exp, 2);
4348 /* The length of the source sequence. */
4349 tree slen = c_strlen (src, 1);
4351 if (warn_stringop_overflow)
4353 tree destsize = compute_objsize (dest,
4354 warn_stringop_overflow - 1);
4356 /* The number of bytes to write is LEN but check_access will also
4357 check SLEN if LEN's value isn't known. */
4358 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4359 destsize);
4362 /* We must be passed a constant len and src parameter. */
4363 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4364 return NULL_RTX;
4366 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4368 /* We're required to pad with trailing zeros if the requested
4369 len is greater than strlen(s2)+1. In that case try to
4370 use store_by_pieces; if it fails, punt. */
4371 if (tree_int_cst_lt (slen, len))
4373 unsigned int dest_align = get_pointer_alignment (dest);
4374 const char *p = c_getstr (src);
4375 rtx dest_mem;
4377 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4378 || !can_store_by_pieces (tree_to_uhwi (len),
4379 builtin_strncpy_read_str,
4380 CONST_CAST (char *, p),
4381 dest_align, false))
4382 return NULL_RTX;
4384 dest_mem = get_memory_rtx (dest, len);
4385 store_by_pieces (dest_mem, tree_to_uhwi (len),
4386 builtin_strncpy_read_str,
4387 CONST_CAST (char *, p), dest_align, false,
4388 RETURN_BEGIN);
4389 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4390 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4391 return dest_mem;
4394 return NULL_RTX;
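
/* Illustrative example, not part of this file, of the zero-padding
   handled above: when LEN exceeds strlen (SRC) + 1, the remainder must
   be zero-filled, which store_by_pieces can emit as constant stores:

     #include <string.h>

     void f (char *d)
     {
       strncpy (d, "ab", 8);   // stores 'a', 'b', then six '\0' bytes
     }
*/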
4397 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4398 bytes from constant string DATA + OFFSET and return it as target
4399 constant. */
4401 static rtx
4402 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4403 scalar_int_mode mode)
4405 const char *c = (const char *) data;
4406 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4408 memset (p, *c, GET_MODE_SIZE (mode));
4410 return c_readstr (p, mode);
4413 /* Callback routine for store_by_pieces. Return the RTL of a register
4414 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4415 char value given in the RTL register data. For example, if mode is
4416 4 bytes wide, return the RTL for 0x01010101*data. */
4418 static rtx
4419 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4420 scalar_int_mode mode)
4422 rtx target, coeff;
4423 size_t size;
4424 char *p;
4426 size = GET_MODE_SIZE (mode);
4427 if (size == 1)
4428 return (rtx) data;
4430 p = XALLOCAVEC (char, size);
4431 memset (p, 1, size);
4432 coeff = c_readstr (p, mode);
4434 target = convert_to_mode (mode, (rtx) data, 1);
4435 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4436 return force_reg (mode, target);
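
/* A user-level sketch, not part of this file, of the replication done
   above for a 4-byte mode: multiplying the byte value by 0x01010101
   broadcasts it into every byte of the word:

     #include <stdint.h>

     uint32_t broadcast4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;   // e.g. 0x2a -> 0x2a2a2a2a
     }
*/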
4439 /* Expand expression EXP, which is a call to the memset builtin. Return
4440 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4441 try to get the result in TARGET, if convenient (and in mode MODE if that's
4442 convenient). */
4444 static rtx
4445 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4447 if (!validate_arglist (exp,
4448 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4449 return NULL_RTX;
4451 tree dest = CALL_EXPR_ARG (exp, 0);
4452 tree val = CALL_EXPR_ARG (exp, 1);
4453 tree len = CALL_EXPR_ARG (exp, 2);
4455 check_memop_access (exp, dest, NULL_TREE, len);
4457 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4460 /* Helper function to do the actual work for expand_builtin_memset. The
4461 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4462 so that this can also be called without constructing an actual CALL_EXPR.
4463 The other arguments and return value are the same as for
4464 expand_builtin_memset. */
4466 static rtx
4467 expand_builtin_memset_args (tree dest, tree val, tree len,
4468 rtx target, machine_mode mode, tree orig_exp)
4470 tree fndecl, fn;
4471 enum built_in_function fcode;
4472 machine_mode val_mode;
4473 char c;
4474 unsigned int dest_align;
4475 rtx dest_mem, dest_addr, len_rtx;
4476 HOST_WIDE_INT expected_size = -1;
4477 unsigned int expected_align = 0;
4478 unsigned HOST_WIDE_INT min_size;
4479 unsigned HOST_WIDE_INT max_size;
4480 unsigned HOST_WIDE_INT probable_max_size;
4482 dest_align = get_pointer_alignment (dest);
4484 /* If DEST is not a pointer type, don't do this operation in-line. */
4485 if (dest_align == 0)
4486 return NULL_RTX;
4488 if (currently_expanding_gimple_stmt)
4489 stringop_block_profile (currently_expanding_gimple_stmt,
4490 &expected_align, &expected_size);
4492 if (expected_align < dest_align)
4493 expected_align = dest_align;
4495 /* If the LEN parameter is zero, return DEST. */
4496 if (integer_zerop (len))
4498 /* Evaluate and ignore VAL in case it has side-effects. */
4499 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4500 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4503 /* Stabilize the arguments in case we fail. */
4504 dest = builtin_save_expr (dest);
4505 val = builtin_save_expr (val);
4506 len = builtin_save_expr (len);
4508 len_rtx = expand_normal (len);
4509 determine_block_size (len, len_rtx, &min_size, &max_size,
4510 &probable_max_size);
4511 dest_mem = get_memory_rtx (dest, len);
4512 val_mode = TYPE_MODE (unsigned_char_type_node);
4514 if (TREE_CODE (val) != INTEGER_CST)
4516 rtx val_rtx;
4518 val_rtx = expand_normal (val);
4519 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4521 /* Assume that we can memset by pieces if we can store
4522 the coefficients by pieces (in the required modes).
4523 We can't pass builtin_memset_gen_str as that emits RTL. */
4524 c = 1;
4525 if (tree_fits_uhwi_p (len)
4526 && can_store_by_pieces (tree_to_uhwi (len),
4527 builtin_memset_read_str, &c, dest_align,
4528 true))
4530 val_rtx = force_reg (val_mode, val_rtx);
4531 store_by_pieces (dest_mem, tree_to_uhwi (len),
4532 builtin_memset_gen_str, val_rtx, dest_align,
4533 true, RETURN_BEGIN);
4535 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4536 dest_align, expected_align,
4537 expected_size, min_size, max_size,
4538 probable_max_size))
4539 goto do_libcall;
4541 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4542 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4543 return dest_mem;
4546 if (target_char_cast (val, &c))
4547 goto do_libcall;
4549 if (c)
4551 if (tree_fits_uhwi_p (len)
4552 && can_store_by_pieces (tree_to_uhwi (len),
4553 builtin_memset_read_str, &c, dest_align,
4554 true))
4555 store_by_pieces (dest_mem, tree_to_uhwi (len),
4556 builtin_memset_read_str, &c, dest_align, true,
4557 RETURN_BEGIN);
4558 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4559 gen_int_mode (c, val_mode),
4560 dest_align, expected_align,
4561 expected_size, min_size, max_size,
4562 probable_max_size))
4563 goto do_libcall;
4565 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4566 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4567 return dest_mem;
4570 set_mem_align (dest_mem, dest_align);
4571 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4572 CALL_EXPR_TAILCALL (orig_exp)
4573 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4574 expected_align, expected_size,
4575 min_size, max_size,
4576 probable_max_size);
4578 if (dest_addr == 0)
4580 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4581 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4584 return dest_addr;
4586 do_libcall:
4587 fndecl = get_callee_fndecl (orig_exp);
4588 fcode = DECL_FUNCTION_CODE (fndecl);
4589 if (fcode == BUILT_IN_MEMSET)
4590 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4591 dest, val, len);
4592 else if (fcode == BUILT_IN_BZERO)
4593 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4594 dest, len);
4595 else
4596 gcc_unreachable ();
4597 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4598 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4599 return expand_call (fn, target, target == const0_rtx);
4602 /* Expand expression EXP, which is a call to the bzero builtin. Return
4603 NULL_RTX if we failed; the caller should emit a normal call. */
4605 static rtx
4606 expand_builtin_bzero (tree exp)
4608 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4609 return NULL_RTX;
4611 tree dest = CALL_EXPR_ARG (exp, 0);
4612 tree size = CALL_EXPR_ARG (exp, 1);
4614 check_memop_access (exp, dest, NULL_TREE, size);
4616 /* New argument list transforming bzero(ptr x, int y) to
4617 memset(ptr x, int 0, size_t y). This is done this way
4618 so that if it isn't expanded inline, we fall back to
4619 calling bzero instead of memset. */
4621 location_t loc = EXPR_LOCATION (exp);
4623 return expand_builtin_memset_args (dest, integer_zero_node,
4624 fold_convert_loc (loc,
4625 size_type_node, size),
4626 const0_rtx, VOIDmode, exp);
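
/* Illustrative equivalence, not part of this file, for the argument
   transformation above:

     #include <strings.h>
     #include <string.h>

     void f (void *p, size_t n)
     {
       bzero (p, n);      // expanded exactly like:
       memset (p, 0, n);
     }

   Because ORIG_EXP still refers to the bzero call, a failed inline
   expansion falls back to a bzero libcall rather than memset.  */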
4629 /* Try to expand cmpstr operation ICODE with the given operands.
4630 Return the result rtx on success, otherwise return null. */
4632 static rtx
4633 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4634 HOST_WIDE_INT align)
4636 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4638 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4639 target = NULL_RTX;
4641 struct expand_operand ops[4];
4642 create_output_operand (&ops[0], target, insn_mode);
4643 create_fixed_operand (&ops[1], arg1_rtx);
4644 create_fixed_operand (&ops[2], arg2_rtx);
4645 create_integer_operand (&ops[3], align);
4646 if (maybe_expand_insn (icode, 4, ops))
4647 return ops[0].value;
4648 return NULL_RTX;
4651 /* Expand expression EXP, which is a call to the memcmp built-in function.
4652 Return NULL_RTX if we failed and the caller should emit a normal call,
4653 otherwise try to get the result in TARGET, if convenient.
4654 RESULT_EQ is true if we can relax the returned value to be either zero
4655 or nonzero, without caring about the sign. */
4657 static rtx
4658 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4660 if (!validate_arglist (exp,
4661 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4662 return NULL_RTX;
4664 tree arg1 = CALL_EXPR_ARG (exp, 0);
4665 tree arg2 = CALL_EXPR_ARG (exp, 1);
4666 tree len = CALL_EXPR_ARG (exp, 2);
4667 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4668 bool no_overflow = true;
4670 /* Diagnose calls where the specified length exceeds the size of either
4671 object. */
4672 tree size = compute_objsize (arg1, 0);
4673 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4674 len, /*maxread=*/NULL_TREE, size,
4675 /*objsize=*/NULL_TREE);
4676 if (no_overflow)
4678 size = compute_objsize (arg2, 0);
4679 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4680 len, /*maxread=*/NULL_TREE, size,
4681 /*objsize=*/NULL_TREE);
4684 /* If the specified length exceeds the size of either object,
4685 call the function. */
4686 if (!no_overflow)
4687 return NULL_RTX;
4689 /* Due to the performance benefit, always inline the calls first
4690 when result_eq is false. */
4691 rtx result = NULL_RTX;
4693 if (!result_eq && fcode != BUILT_IN_BCMP)
4695 result = inline_expand_builtin_string_cmp (exp, target);
4696 if (result)
4697 return result;
4700 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4701 location_t loc = EXPR_LOCATION (exp);
4703 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4704 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4706 /* If we don't have POINTER_TYPE, call the function. */
4707 if (arg1_align == 0 || arg2_align == 0)
4708 return NULL_RTX;
4710 rtx arg1_rtx = get_memory_rtx (arg1, len);
4711 rtx arg2_rtx = get_memory_rtx (arg2, len);
4712 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4714 /* Set MEM_SIZE as appropriate. */
4715 if (CONST_INT_P (len_rtx))
4717 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4718 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4721 by_pieces_constfn constfn = NULL;
4723 const char *src_str = c_getstr (arg2);
4724 if (result_eq && src_str == NULL)
4726 src_str = c_getstr (arg1);
4727 if (src_str != NULL)
4728 std::swap (arg1_rtx, arg2_rtx);
4731 /* If SRC is a string constant and block move would be done
4732 by pieces, we can avoid loading the string from memory
4733 and only store the computed constants. */
4734 if (src_str
4735 && CONST_INT_P (len_rtx)
4736 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4737 constfn = builtin_memcpy_read_str;
4739 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4740 TREE_TYPE (len), target,
4741 result_eq, constfn,
4742 CONST_CAST (char *, src_str));
4744 if (result)
4746 /* Return the value in the proper mode for this function. */
4747 if (GET_MODE (result) == mode)
4748 return result;
4750 if (target != 0)
4752 convert_move (target, result, 0);
4753 return target;
4756 return convert_to_mode (mode, result, 0);
4759 return NULL_RTX;
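
/* Illustrative example, not part of this file, of the RESULT_EQ
   relaxation used above: when only equality is tested, the expansion
   may return any nonzero value on a mismatch instead of the
   sign-correct memcmp result:

     #include <string.h>

     int same16 (const void *a, const void *b)
     {
       return memcmp (a, b, 16) == 0;   // only zero/nonzero matters
     }
*/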
4762 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4763 if we failed; the caller should emit a normal call, otherwise try to get
4764 the result in TARGET, if convenient. */
4766 static rtx
4767 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4769 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4770 return NULL_RTX;
4772 /* Due to the performance benefit, always inline the calls first. */
4773 rtx result = NULL_RTX;
4774 result = inline_expand_builtin_string_cmp (exp, target);
4775 if (result)
4776 return result;
4778 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4779 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4780 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4781 return NULL_RTX;
4783 tree arg1 = CALL_EXPR_ARG (exp, 0);
4784 tree arg2 = CALL_EXPR_ARG (exp, 1);
4786 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4787 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4789 /* If we don't have POINTER_TYPE, call the function. */
4790 if (arg1_align == 0 || arg2_align == 0)
4791 return NULL_RTX;
4793 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4794 arg1 = builtin_save_expr (arg1);
4795 arg2 = builtin_save_expr (arg2);
4797 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4798 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4800 /* Try to call cmpstrsi. */
4801 if (cmpstr_icode != CODE_FOR_nothing)
4802 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4803 MIN (arg1_align, arg2_align));
4805 /* Try to determine at least one length and call cmpstrnsi. */
4806 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4808 tree len;
4809 rtx arg3_rtx;
4811 tree len1 = c_strlen (arg1, 1);
4812 tree len2 = c_strlen (arg2, 1);
4814 if (len1)
4815 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4816 if (len2)
4817 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4819 /* If we don't have a constant length for the first, use the length
4820 of the second, if we know it. We don't require a constant for
4821 this case; some cost analysis could be done if both are available
4822 but neither is constant. For now, assume they're equally cheap,
4823 unless one has side effects. If both strings have constant lengths,
4824 use the smaller. */
4826 if (!len1)
4827 len = len2;
4828 else if (!len2)
4829 len = len1;
4830 else if (TREE_SIDE_EFFECTS (len1))
4831 len = len2;
4832 else if (TREE_SIDE_EFFECTS (len2))
4833 len = len1;
4834 else if (TREE_CODE (len1) != INTEGER_CST)
4835 len = len2;
4836 else if (TREE_CODE (len2) != INTEGER_CST)
4837 len = len1;
4838 else if (tree_int_cst_lt (len1, len2))
4839 len = len1;
4840 else
4841 len = len2;
4843 /* If both arguments have side effects, we cannot optimize. */
4844 if (len && !TREE_SIDE_EFFECTS (len))
4846 arg3_rtx = expand_normal (len);
4847 result = expand_cmpstrn_or_cmpmem
4848 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4849 arg3_rtx, MIN (arg1_align, arg2_align));
4853 tree fndecl = get_callee_fndecl (exp);
4854 if (result)
4856 /* Check to see if the argument was declared attribute nonstring
4857 and if so, issue a warning since at this point it's not known
4858 to be nul-terminated. */
4859 maybe_warn_nonstring_arg (fndecl, exp);
4861 /* Return the value in the proper mode for this function. */
4862 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4863 if (GET_MODE (result) == mode)
4864 return result;
4865 if (target == 0)
4866 return convert_to_mode (mode, result, 0);
4867 convert_move (target, result, 0);
4868 return target;
4871 /* Expand the library call ourselves using a stabilized argument
4872 list to avoid re-evaluating the function's arguments twice. */
4873 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4874 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4875 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4876 return expand_call (fn, target, target == const0_rtx);
4879 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4880 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4881 the result in TARGET, if convenient. */
4883 static rtx
4884 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4885 ATTRIBUTE_UNUSED machine_mode mode)
4887 if (!validate_arglist (exp,
4888 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4889 return NULL_RTX;
4891 /* Due to the performance benefit, always inline the calls first. */
4892 rtx result = NULL_RTX;
4893 result = inline_expand_builtin_string_cmp (exp, target);
4894 if (result)
4895 return result;
4897 /* If c_strlen can determine an expression for one of the string
4898 lengths, and it doesn't have side effects, then emit cmpstrnsi
4899 using length MIN(strlen(string)+1, arg3). */
4900 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4901 if (cmpstrn_icode == CODE_FOR_nothing)
4902 return NULL_RTX;
4904 tree len;
4906 tree arg1 = CALL_EXPR_ARG (exp, 0);
4907 tree arg2 = CALL_EXPR_ARG (exp, 1);
4908 tree arg3 = CALL_EXPR_ARG (exp, 2);
4910 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4911 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4913 tree len1 = c_strlen (arg1, 1);
4914 tree len2 = c_strlen (arg2, 1);
4916 location_t loc = EXPR_LOCATION (exp);
4918 if (len1)
4919 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4920 if (len2)
4921 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4923 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4925 /* If we don't have a constant length for the first, use the length
4926 of the second, if we know it. If neither string is constant length,
4927 use the given length argument. We don't require a constant for
4928 this case; some cost analysis could be done if both are available
4929 but neither is constant. For now, assume they're equally cheap,
4930 unless one has side effects. If both strings have constant lengths,
4931 use the smaller. */
4933 if (!len1 && !len2)
4934 len = len3;
4935 else if (!len1)
4936 len = len2;
4937 else if (!len2)
4938 len = len1;
4939 else if (TREE_SIDE_EFFECTS (len1))
4940 len = len2;
4941 else if (TREE_SIDE_EFFECTS (len2))
4942 len = len1;
4943 else if (TREE_CODE (len1) != INTEGER_CST)
4944 len = len2;
4945 else if (TREE_CODE (len2) != INTEGER_CST)
4946 len = len1;
4947 else if (tree_int_cst_lt (len1, len2))
4948 len = len1;
4949 else
4950 len = len2;
4952 /* If we are not using the given length, we must incorporate it here.
4953 The actual new length parameter will be MIN(len,arg3) in this case. */
4954 if (len != len3)
4956 len = fold_convert_loc (loc, sizetype, len);
4957 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4959 rtx arg1_rtx = get_memory_rtx (arg1, len);
4960 rtx arg2_rtx = get_memory_rtx (arg2, len);
4961 rtx arg3_rtx = expand_normal (len);
4962 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4963 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4964 MIN (arg1_align, arg2_align));
4966 tree fndecl = get_callee_fndecl (exp);
4967 if (result)
4969 /* Check to see if the argument was declared attribute nonstring
4970 and if so, issue a warning since at this point it's not known
4971 to be nul-terminated. */
4972 maybe_warn_nonstring_arg (fndecl, exp);
4974 /* Return the value in the proper mode for this function. */
4975 mode = TYPE_MODE (TREE_TYPE (exp));
4976 if (GET_MODE (result) == mode)
4977 return result;
4978 if (target == 0)
4979 return convert_to_mode (mode, result, 0);
4980 convert_move (target, result, 0);
4981 return target;
4984 /* Expand the library call ourselves using a stabilized argument
4985 list to avoid re-evaluating the function's arguments twice. */
4986 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4987 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4988 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4989 return expand_call (fn, target, target == const0_rtx);
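
/* Worked example, not part of this file, of the length selection above:

     #include <string.h>

     int f (const char *s, size_t n)
     {
       // len1 = strlen ("abc") + 1 == 4, so the cmpstrn expansion
       // compares MIN (4, n) bytes; bytes past the nul of "abc"
       // cannot change the result.
       return strncmp ("abc", s, n);
     }
*/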
4992 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4993 if that's convenient. */
4995 rtx
4996 expand_builtin_saveregs (void)
4998 rtx val;
4999 rtx_insn *seq;
5001 /* Don't do __builtin_saveregs more than once in a function.
5002 Save the result of the first call and reuse it. */
5003 if (saveregs_value != 0)
5004 return saveregs_value;
5006 /* When this function is called, it means that registers must be
5007 saved on entry to this function. So we migrate the call to the
5008 first insn of this function. */
5010 start_sequence ();
5012 /* Do whatever the machine needs done in this case. */
5013 val = targetm.calls.expand_builtin_saveregs ();
5015 seq = get_insns ();
5016 end_sequence ();
5018 saveregs_value = val;
5020 /* Put the insns after the NOTE that starts the function. If this
5021 is inside a start_sequence, make the outer-level insn chain current, so
5022 the code is placed at the start of the function. */
5023 push_topmost_sequence ();
5024 emit_insn_after (seq, entry_of_function ());
5025 pop_topmost_sequence ();
5027 return val;
5030 /* Expand a call to __builtin_next_arg. */
5032 static rtx
5033 expand_builtin_next_arg (void)
5035 /* Checking arguments is already done in fold_builtin_next_arg
5036 that must be called before this function. */
5037 return expand_binop (ptr_mode, add_optab,
5038 crtl->args.internal_arg_pointer,
5039 crtl->args.arg_offset_rtx,
5040 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5043 /* Make it easier for the backends by protecting the valist argument
5044 from multiple evaluations. */
5046 static tree
5047 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5049 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5051 /* The current way of determining the type of valist is completely
5052 bogus. We should have the information on the va builtin instead. */
5053 if (!vatype)
5054 vatype = targetm.fn_abi_va_list (cfun->decl);
5056 if (TREE_CODE (vatype) == ARRAY_TYPE)
5058 if (TREE_SIDE_EFFECTS (valist))
5059 valist = save_expr (valist);
5061 /* For this case, the backends will be expecting a pointer to
5062 vatype, but it's possible we've actually been given an array
5063 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5064 So fix it. */
5065 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5067 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5068 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5071 else
5073 tree pt = build_pointer_type (vatype);
5075 if (! needs_lvalue)
5077 if (! TREE_SIDE_EFFECTS (valist))
5078 return valist;
5080 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5081 TREE_SIDE_EFFECTS (valist) = 1;
5084 if (TREE_SIDE_EFFECTS (valist))
5085 valist = save_expr (valist);
5086 valist = fold_build2_loc (loc, MEM_REF,
5087 vatype, valist, build_int_cst (pt, 0));
5090 return valist;
5093 /* The "standard" definition of va_list is void*. */
5095 tree
5096 std_build_builtin_va_list (void)
5098 return ptr_type_node;
5101 /* The "standard" abi va_list is va_list_type_node. */
5103 tree
5104 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5106 return va_list_type_node;
5109 /* The "standard" type of va_list is va_list_type_node. */
5111 tree
5112 std_canonical_va_list_type (tree type)
5114 tree wtype, htype;
5116 wtype = va_list_type_node;
5117 htype = type;
5119 if (TREE_CODE (wtype) == ARRAY_TYPE)
5121 /* If va_list is an array type, the argument may have decayed
5122 to a pointer type, e.g. by being passed to another function.
5123 In that case, unwrap both types so that we can compare the
5124 underlying records. */
5125 if (TREE_CODE (htype) == ARRAY_TYPE
5126 || POINTER_TYPE_P (htype))
5128 wtype = TREE_TYPE (wtype);
5129 htype = TREE_TYPE (htype);
5132 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5133 return va_list_type_node;
5135 return NULL_TREE;
5138 /* The "standard" implementation of va_start: just assign `nextarg' to
5139 the variable. */
5141 void
5142 std_expand_builtin_va_start (tree valist, rtx nextarg)
5144 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5145 convert_move (va_r, nextarg, 0);
5148 /* Expand EXP, a call to __builtin_va_start. */
5150 static rtx
5151 expand_builtin_va_start (tree exp)
5153 rtx nextarg;
5154 tree valist;
5155 location_t loc = EXPR_LOCATION (exp);
5157 if (call_expr_nargs (exp) < 2)
5159 error_at (loc, "too few arguments to function %<va_start%>");
5160 return const0_rtx;
5163 if (fold_builtin_next_arg (exp, true))
5164 return const0_rtx;
5166 nextarg = expand_builtin_next_arg ();
5167 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5169 if (targetm.expand_builtin_va_start)
5170 targetm.expand_builtin_va_start (valist, nextarg);
5171 else
5172 std_expand_builtin_va_start (valist, nextarg);
5174 return const0_rtx;
5177 /* Expand EXP, a call to __builtin_va_end. */
5179 static rtx
5180 expand_builtin_va_end (tree exp)
5182 tree valist = CALL_EXPR_ARG (exp, 0);
5184 /* Evaluate for side effects, if needed. I hate macros that don't
5185 do that. */
5186 if (TREE_SIDE_EFFECTS (valist))
5187 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5189 return const0_rtx;
5192 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5193 builtin rather than just as an assignment in stdarg.h because of the
5194 nastiness of array-type va_list types. */
5196 static rtx
5197 expand_builtin_va_copy (tree exp)
5199 tree dst, src, t;
5200 location_t loc = EXPR_LOCATION (exp);
5202 dst = CALL_EXPR_ARG (exp, 0);
5203 src = CALL_EXPR_ARG (exp, 1);
5205 dst = stabilize_va_list_loc (loc, dst, 1);
5206 src = stabilize_va_list_loc (loc, src, 0);
5208 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5210 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5212 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5213 TREE_SIDE_EFFECTS (t) = 1;
5214 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5216 else
5218 rtx dstb, srcb, size;
5220 /* Evaluate to pointers. */
5221 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5222 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5223 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5224 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5226 dstb = convert_memory_address (Pmode, dstb);
5227 srcb = convert_memory_address (Pmode, srcb);
5229 /* "Dereference" to BLKmode memories. */
5230 dstb = gen_rtx_MEM (BLKmode, dstb);
5231 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5232 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5233 srcb = gen_rtx_MEM (BLKmode, srcb);
5234 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5235 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5237 /* Copy. */
5238 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5241 return const0_rtx;
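
/* Illustrative example, not part of this file, of why va_copy needs the
   block-move path above on targets whose va_list is an array type:

     #include <stdarg.h>

     int sum_twice (int n, ...)
     {
       va_list ap, ap2;
       int s = 0;
       va_start (ap, n);
       va_copy (ap2, ap);   // may be a block copy, not a simple assignment
       for (int i = 0; i < n; i++)
         s += va_arg (ap, int);
       for (int i = 0; i < n; i++)
         s += va_arg (ap2, int);
       va_end (ap);
       va_end (ap2);
       return s;
     }
*/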
5244 /* Expand a call to one of the builtin functions __builtin_frame_address or
5245 __builtin_return_address. */
5247 static rtx
5248 expand_builtin_frame_address (tree fndecl, tree exp)
5250 /* The argument must be a nonnegative integer constant.
5251 It counts the number of frames to scan up the stack.
5252 The value is either the frame pointer value or the return
5253 address saved in that frame. */
5254 if (call_expr_nargs (exp) == 0)
5255 /* Warning about missing arg was already issued. */
5256 return const0_rtx;
5257 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5259 error ("invalid argument to %qD", fndecl);
5260 return const0_rtx;
5262 else
5264 /* Number of frames to scan up the stack. */
5265 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5267 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5269 /* Some ports cannot access arbitrary stack frames. */
5270 if (tem == NULL)
5272 warning (0, "unsupported argument to %qD", fndecl);
5273 return const0_rtx;
5276 if (count)
5278 /* Warn since no effort is made to ensure that any frame
5279 beyond the current one exists or can be safely reached. */
5280 warning (OPT_Wframe_address, "calling %qD with "
5281 "a nonzero argument is unsafe", fndecl);
5284 /* For __builtin_frame_address, return what we've got. */
5285 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5286 return tem;
5288 if (!REG_P (tem)
5289 && ! CONSTANT_P (tem))
5290 tem = copy_addr_to_reg (tem);
5291 return tem;
5295 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5296 failed and the caller should emit a normal call. */
5298 static rtx
5299 expand_builtin_alloca (tree exp)
5301 rtx op0;
5302 rtx result;
5303 unsigned int align;
5304 tree fndecl = get_callee_fndecl (exp);
5305 HOST_WIDE_INT max_size;
5306 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5307 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5308 bool valid_arglist
5309 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5310 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5311 VOID_TYPE)
5312 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5313 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5314 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5316 if (!valid_arglist)
5317 return NULL_RTX;
5319 if ((alloca_for_var
5320 && warn_vla_limit >= HOST_WIDE_INT_MAX
5321 && warn_alloc_size_limit < warn_vla_limit)
5322 || (!alloca_for_var
5323 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5324 && warn_alloc_size_limit < warn_alloca_limit
5327 /* -Walloca-larger-than and -Wvla-larger-than settings of
5328 less than HOST_WIDE_INT_MAX override the more general
5329 -Walloc-size-larger-than so unless either of the former
5330 options is smaller than the last one (which would imply
5331 that the call was already checked), check the alloca
5332 arguments for overflow. */
5333 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5334 int idx[] = { 0, -1 };
5335 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5338 /* Compute the argument. */
5339 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5341 /* Compute the alignment. */
5342 align = (fcode == BUILT_IN_ALLOCA
5343 ? BIGGEST_ALIGNMENT
5344 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5346 /* Compute the maximum size. */
5347 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5348 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5349 : -1);
5351 /* Allocate the desired space. If the allocation stems from the declaration
5352 of a variable-sized object, it cannot accumulate. */
5353 result
5354 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5355 result = convert_memory_address (ptr_mode, result);
5357 return result;
5360 /* Emit a call to __asan_allocas_unpoison for EXP. Add
5361 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5362 STACK_DYNAMIC_OFFSET value, to the second argument of the call. See the
5363 motivation for this in the comment on handle_builtin_stack_restore. */
5365 static rtx
5366 expand_asan_emit_allocas_unpoison (tree exp)
5368 tree arg0 = CALL_EXPR_ARG (exp, 0);
5369 tree arg1 = CALL_EXPR_ARG (exp, 1);
5370 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5371 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5372 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5373 stack_pointer_rtx, NULL_RTX, 0,
5374 OPTAB_LIB_WIDEN);
5375 off = convert_modes (ptr_mode, Pmode, off, 0);
5376 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5377 OPTAB_LIB_WIDEN);
5378 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5379 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5380 top, ptr_mode, bot, ptr_mode);
5381 return ret;
5384 /* Expand a call to bswap builtin in EXP.
5385 Return NULL_RTX if a normal call should be emitted rather than expanding the
5386 function in-line. If convenient, the result should be placed in TARGET.
5387 SUBTARGET may be used as the target for computing one of EXP's operands. */
5389 static rtx
5390 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5391 rtx subtarget)
5393 tree arg;
5394 rtx op0;
5396 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5397 return NULL_RTX;
5399 arg = CALL_EXPR_ARG (exp, 0);
5400 op0 = expand_expr (arg,
5401 subtarget && GET_MODE (subtarget) == target_mode
5402 ? subtarget : NULL_RTX,
5403 target_mode, EXPAND_NORMAL);
5404 if (GET_MODE (op0) != target_mode)
5405 op0 = convert_to_mode (target_mode, op0, 1);
5407 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5409 gcc_assert (target);
5411 return convert_to_mode (target_mode, target, 1);
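/* Worked example (illustrative): __builtin_bswap32 (0x12345678)
   yields 0x78563412; the bswap_optab expansion above reverses the
   byte order of the operand within TARGET_MODE. */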
5414 /* Expand a call to a unary builtin in EXP.
5415 Return NULL_RTX if a normal call should be emitted rather than expanding the
5416 function in-line. If convenient, the result should be placed in TARGET.
5417 SUBTARGET may be used as the target for computing one of EXP's operands. */
5419 static rtx
5420 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5421 rtx subtarget, optab op_optab)
5423 rtx op0;
5425 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5426 return NULL_RTX;
5428 /* Compute the argument. */
5429 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5430 (subtarget
5431 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5432 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5433 VOIDmode, EXPAND_NORMAL);
5434 /* Compute op, into TARGET if possible.
5435 Set TARGET to wherever the result comes back. */
5436 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5437 op_optab, op0, target, op_optab != clrsb_optab);
5438 gcc_assert (target);
5440 return convert_to_mode (target_mode, target, 0);
5443 /* Expand a call to __builtin_expect. We just return our argument
5444 as the builtin_expect semantics should already have been acted on by
5445 the tree branch prediction pass. */
5447 static rtx
5448 expand_builtin_expect (tree exp, rtx target)
5450 tree arg;
5452 if (call_expr_nargs (exp) < 2)
5453 return const0_rtx;
5454 arg = CALL_EXPR_ARG (exp, 0);
5456 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5457 /* When guessing was done, the hints should be already stripped away. */
5458 gcc_assert (!flag_guess_branch_prob
5459 || optimize == 0 || seen_error ());
5460 return target;
5463 /* Expand a call to __builtin_expect_with_probability. We just return our
6464 argument, as the builtin_expect semantics should already have been acted
6465 on by the tree branch prediction pass. */
5467 static rtx
5468 expand_builtin_expect_with_probability (tree exp, rtx target)
5470 tree arg;
5472 if (call_expr_nargs (exp) < 3)
5473 return const0_rtx;
5474 arg = CALL_EXPR_ARG (exp, 0);
5476 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5477 /* When guessing was done, the hints should be already stripped away. */
5478 gcc_assert (!flag_guess_branch_prob
5479 || optimize == 0 || seen_error ());
5480 return target;
5484 /* Expand a call to __builtin_assume_aligned. We just return our first
5485 argument, as the builtin_assume_aligned semantics should already have
5486 been acted on by CCP. */
5488 static rtx
5489 expand_builtin_assume_aligned (tree exp, rtx target)
5491 if (call_expr_nargs (exp) < 2)
5492 return const0_rtx;
5493 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5494 EXPAND_NORMAL);
5495 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5496 && (call_expr_nargs (exp) < 3
5497 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5498 return target;
5501 void
5502 expand_builtin_trap (void)
5504 if (targetm.have_trap ())
5506 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5507 /* For trap insns when not accumulating outgoing args force
5508 REG_ARGS_SIZE note to prevent crossjumping of calls with
5509 different args sizes. */
5510 if (!ACCUMULATE_OUTGOING_ARGS)
5511 add_args_size_note (insn, stack_pointer_delta);
5513 else
5515 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5516 tree call_expr = build_call_expr (fn, 0);
5517 expand_call (call_expr, NULL_RTX, false);
5520 emit_barrier ();
5523 /* Expand a call to __builtin_unreachable. We do nothing except emit
5524 a barrier saying that control flow will not pass here.
5526 It is the responsibility of the program being compiled to ensure
5527 that control flow never reaches __builtin_unreachable. */
5528 static void
5529 expand_builtin_unreachable (void)
5531 emit_barrier ();
5534 /* Expand EXP, a call to fabs, fabsf or fabsl.
5535 Return NULL_RTX if a normal call should be emitted rather than expanding
5536 the function inline. If convenient, the result should be placed
5537 in TARGET. SUBTARGET may be used as the target for computing
5538 the operand. */
5540 static rtx
5541 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5543 machine_mode mode;
5544 tree arg;
5545 rtx op0;
5547 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5548 return NULL_RTX;
5550 arg = CALL_EXPR_ARG (exp, 0);
5551 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5552 mode = TYPE_MODE (TREE_TYPE (arg));
5553 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5554 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5557 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5558 Return NULL if a normal call should be emitted rather than expanding the
5559 function inline. If convenient, the result should be placed in TARGET.
5560 SUBTARGET may be used as the target for computing the operand. */
5562 static rtx
5563 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5565 rtx op0, op1;
5566 tree arg;
5568 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5569 return NULL_RTX;
5571 arg = CALL_EXPR_ARG (exp, 0);
5572 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5574 arg = CALL_EXPR_ARG (exp, 1);
5575 op1 = expand_normal (arg);
5577 return expand_copysign (op0, op1, target);
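/* Semantics reminder (illustrative): copysign (3.0, -0.5) == -3.0.
   The magnitude comes from the first operand and only the sign of the
   second operand is used, which is what expand_copysign implements. */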
5580 /* Expand a call to __builtin___clear_cache. */
5582 static rtx
5583 expand_builtin___clear_cache (tree exp)
5585 if (!targetm.code_for_clear_cache)
5587 #ifdef CLEAR_INSN_CACHE
5588 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5589 does something. Just do the default expansion to a call to
5590 __clear_cache(). */
5591 return NULL_RTX;
5592 #else
5593 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5594 does nothing. There is no need to call it. Do nothing. */
5595 return const0_rtx;
5596 #endif /* CLEAR_INSN_CACHE */
5599 /* We have a "clear_cache" insn, and it will handle everything. */
5600 tree begin, end;
5601 rtx begin_rtx, end_rtx;
5603 /* We must not expand to a library call. If we did, any
5604 fallback library function in libgcc that might contain a call to
5605 __builtin___clear_cache() would recurse infinitely. */
5606 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5608 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5609 return const0_rtx;
5612 if (targetm.have_clear_cache ())
5614 struct expand_operand ops[2];
5616 begin = CALL_EXPR_ARG (exp, 0);
5617 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5619 end = CALL_EXPR_ARG (exp, 1);
5620 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5622 create_address_operand (&ops[0], begin_rtx);
5623 create_address_operand (&ops[1], end_rtx);
5624 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5625 return const0_rtx;
5627 return const0_rtx;
5630 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5632 static rtx
5633 round_trampoline_addr (rtx tramp)
5635 rtx temp, addend, mask;
5637 /* If we don't need too much alignment, we'll have been guaranteed
5638 proper alignment by get_trampoline_type. */
5639 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5640 return tramp;
5642 /* Round address up to desired boundary. */
5643 temp = gen_reg_rtx (Pmode);
5644 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5645 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5647 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5648 temp, 0, OPTAB_LIB_WIDEN);
5649 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5650 temp, 0, OPTAB_LIB_WIDEN);
5652 return tramp;
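/* Worked example (illustrative): with TRAMPOLINE_ALIGNMENT == 64 and
   BITS_PER_UNIT == 8, ADDEND is 7 and MASK is -8, so a TRAMP of 0x1003
   is rounded as (0x1003 + 7) & -8 == 0x1008, the next 8-byte
   boundary. */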
5655 static rtx
5656 expand_builtin_init_trampoline (tree exp, bool onstack)
5658 tree t_tramp, t_func, t_chain;
5659 rtx m_tramp, r_tramp, r_chain, tmp;
5661 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5662 POINTER_TYPE, VOID_TYPE))
5663 return NULL_RTX;
5665 t_tramp = CALL_EXPR_ARG (exp, 0);
5666 t_func = CALL_EXPR_ARG (exp, 1);
5667 t_chain = CALL_EXPR_ARG (exp, 2);
5669 r_tramp = expand_normal (t_tramp);
5670 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5671 MEM_NOTRAP_P (m_tramp) = 1;
5673 /* If ONSTACK, the TRAMP argument should be the address of a field
5674 within the local function's FRAME decl. Either way, let's see if
5675 we can fill in the MEM_ATTRs for this memory. */
5676 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5677 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5679 /* Creator of a heap trampoline is responsible for making sure the
5680 address is aligned to at least STACK_BOUNDARY. Normally malloc
5681 will ensure this anyhow. */
5682 tmp = round_trampoline_addr (r_tramp);
5683 if (tmp != r_tramp)
5685 m_tramp = change_address (m_tramp, BLKmode, tmp);
5686 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5687 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5690 /* The FUNC argument should be the address of the nested function.
5691 Extract the actual function decl to pass to the hook. */
5692 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5693 t_func = TREE_OPERAND (t_func, 0);
5694 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5696 r_chain = expand_normal (t_chain);
5698 /* Generate insns to initialize the trampoline. */
5699 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5701 if (onstack)
5703 trampolines_created = 1;
5705 if (targetm.calls.custom_function_descriptors != 0)
5706 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5707 "trampoline generated for nested function %qD", t_func);
5710 return const0_rtx;
5713 static rtx
5714 expand_builtin_adjust_trampoline (tree exp)
5716 rtx tramp;
5718 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5719 return NULL_RTX;
5721 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5722 tramp = round_trampoline_addr (tramp);
5723 if (targetm.calls.trampoline_adjust_address)
5724 tramp = targetm.calls.trampoline_adjust_address (tramp);
5726 return tramp;
5729 /* Expand a call to the builtin descriptor initialization routine.
5730 A descriptor is made up of a pair of pointers: to the static
5731 chain and to the code entry, in this order. */
5733 static rtx
5734 expand_builtin_init_descriptor (tree exp)
5736 tree t_descr, t_func, t_chain;
5737 rtx m_descr, r_descr, r_func, r_chain;
5739 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5740 VOID_TYPE))
5741 return NULL_RTX;
5743 t_descr = CALL_EXPR_ARG (exp, 0);
5744 t_func = CALL_EXPR_ARG (exp, 1);
5745 t_chain = CALL_EXPR_ARG (exp, 2);
5747 r_descr = expand_normal (t_descr);
5748 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5749 MEM_NOTRAP_P (m_descr) = 1;
5751 r_func = expand_normal (t_func);
5752 r_chain = expand_normal (t_chain);
5754 /* Generate insns to initialize the descriptor. */
5755 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5756 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5757 POINTER_SIZE / BITS_PER_UNIT), r_func);
5759 return const0_rtx;
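/* Layout sketch (illustrative, assuming 64-bit pointers):

     struct descriptor { void *chain; void *entry; };

   The two moves above store the static chain at offset 0 and the code
   entry at offset POINTER_SIZE / BITS_PER_UNIT, i.e. 8 here. */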
5762 /* Expand a call to the builtin descriptor adjustment routine. */
5764 static rtx
5765 expand_builtin_adjust_descriptor (tree exp)
5767 rtx tramp;
5769 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5770 return NULL_RTX;
5772 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5774 /* Unalign the descriptor to allow runtime identification. */
5775 tramp = plus_constant (ptr_mode, tramp,
5776 targetm.calls.custom_function_descriptors);
5778 return force_operand (tramp, NULL_RTX);
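/* Illustration (a hedged sketch): with
   targetm.calls.custom_function_descriptors == 1, descriptor
   "addresses" become odd while genuine code addresses stay even, so an
   indirect call can dispatch at run time roughly as

     if ((uintptr_t) fn & 1)
       ... load static chain and entry point from (char *) fn - 1 ...
     else
       fn ();

   which is what the plus_constant adjustment above enables. */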
5781 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5782 function. The function first checks whether the back end provides
5783 an insn to implement signbit for the respective mode. If not, it
5784 checks whether the floating point format of the value is such that
5785 the sign bit can be extracted. If that is not the case, error out.
5786 EXP is the expression that is a call to the builtin function; if
5787 convenient, the result should be placed in TARGET. */
5788 static rtx
5789 expand_builtin_signbit (tree exp, rtx target)
5791 const struct real_format *fmt;
5792 scalar_float_mode fmode;
5793 scalar_int_mode rmode, imode;
5794 tree arg;
5795 int word, bitpos;
5796 enum insn_code icode;
5797 rtx temp;
5798 location_t loc = EXPR_LOCATION (exp);
5800 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5801 return NULL_RTX;
5803 arg = CALL_EXPR_ARG (exp, 0);
5804 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5805 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5806 fmt = REAL_MODE_FORMAT (fmode);
5808 arg = builtin_save_expr (arg);
5810 /* Expand the argument yielding a RTX expression. */
5811 temp = expand_normal (arg);
5813 /* Check if the back end provides an insn that handles signbit for the
5814 argument's mode. */
5815 icode = optab_handler (signbit_optab, fmode);
5816 if (icode != CODE_FOR_nothing)
5818 rtx_insn *last = get_last_insn ();
5819 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5820 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5821 return target;
5822 delete_insns_since (last);
5825 /* For floating point formats without a sign bit, implement signbit
5826 as "ARG < 0.0". */
5827 bitpos = fmt->signbit_ro;
5828 if (bitpos < 0)
5830 /* But we can't do this if the format supports signed zero. */
5831 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5833 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5834 build_real (TREE_TYPE (arg), dconst0));
5835 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5838 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5840 imode = int_mode_for_mode (fmode).require ();
5841 temp = gen_lowpart (imode, temp);
5843 else
5845 imode = word_mode;
5846 /* Handle targets with different FP word orders. */
5847 if (FLOAT_WORDS_BIG_ENDIAN)
5848 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5849 else
5850 word = bitpos / BITS_PER_WORD;
5851 temp = operand_subword_force (temp, word, fmode);
5852 bitpos = bitpos % BITS_PER_WORD;
5855 /* Force the intermediate word_mode (or narrower) result into a
5856 register. This avoids attempting to create paradoxical SUBREGs
5857 of floating point modes below. */
5858 temp = force_reg (imode, temp);
5860 /* If the bitpos is within the "result mode" lowpart, the operation
5861 can be implemented with a single bitwise AND. Otherwise, we need
5862 a right shift and an AND. */
5864 if (bitpos < GET_MODE_BITSIZE (rmode))
5866 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5868 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5869 temp = gen_lowpart (rmode, temp);
5870 temp = expand_binop (rmode, and_optab, temp,
5871 immed_wide_int_const (mask, rmode),
5872 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5874 else
5876 /* Perform a logical right shift to place the signbit in the least
5877 significant bit, then truncate the result to the desired mode
5878 and mask just this bit. */
5879 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5880 temp = gen_lowpart (rmode, temp);
5881 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5882 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5885 return temp;
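/* Worked example (illustrative): for IEEE single precision with a
   32-bit int result, bitpos is 31 < 32, so the mask path computes
   temp & 0x80000000. For IEEE double precision on a 64-bit target
   with the same result mode, bitpos is 63 >= 32, so the shift path
   computes (temp >> 63) & 1 instead. */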
5888 /* Expand fork or exec calls. TARGET is the desired target of the
5889 call. EXP is the call. FN is the
5890 identifier of the actual function. IGNORE is nonzero if the
5891 value is to be ignored. */
5893 static rtx
5894 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5896 tree id, decl;
5897 tree call;
5899 /* If we are not profiling, just call the function. */
5900 if (!profile_arc_flag)
5901 return NULL_RTX;
5903 /* Otherwise call the wrapper. This should be equivalent for the rest of
5904 the compiler, so the code does not diverge, and the wrapper may run the
5905 code necessary for keeping the profiling sane. */
5907 switch (DECL_FUNCTION_CODE (fn))
5909 case BUILT_IN_FORK:
5910 id = get_identifier ("__gcov_fork");
5911 break;
5913 case BUILT_IN_EXECL:
5914 id = get_identifier ("__gcov_execl");
5915 break;
5917 case BUILT_IN_EXECV:
5918 id = get_identifier ("__gcov_execv");
5919 break;
5921 case BUILT_IN_EXECLP:
5922 id = get_identifier ("__gcov_execlp");
5923 break;
5925 case BUILT_IN_EXECLE:
5926 id = get_identifier ("__gcov_execle");
5927 break;
5929 case BUILT_IN_EXECVP:
5930 id = get_identifier ("__gcov_execvp");
5931 break;
5933 case BUILT_IN_EXECVE:
5934 id = get_identifier ("__gcov_execve");
5935 break;
5937 default:
5938 gcc_unreachable ();
5941 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5942 FUNCTION_DECL, id, TREE_TYPE (fn));
5943 DECL_EXTERNAL (decl) = 1;
5944 TREE_PUBLIC (decl) = 1;
5945 DECL_ARTIFICIAL (decl) = 1;
5946 TREE_NOTHROW (decl) = 1;
5947 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5948 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5949 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5950 return expand_call (call, target, ignore);
5955 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5956 the pointer in these functions is void*, the tree optimizers may remove
5957 casts. The mode computed in expand_builtin isn't reliable either, due
5958 to __sync_bool_compare_and_swap.
5960 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5961 group of builtins. This gives us log2 of the mode size. */
5963 static inline machine_mode
5964 get_builtin_sync_mode (int fcode_diff)
5966 /* The size is not negotiable, so ask not to get BLKmode in return
5967 if the target indicates that a smaller size would be better. */
5968 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
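/* Worked example (illustrative): for BUILT_IN_SYNC_FETCH_AND_ADD_4 the
   difference from BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2, so the requested
   mode has BITS_PER_UNIT << 2 == 32 bits (with BITS_PER_UNIT == 8),
   i.e. SImode on typical targets. */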
5971 /* Expand the memory expression LOC and return the appropriate memory operand
5972 for the builtin_sync operations. */
5974 static rtx
5975 get_builtin_sync_mem (tree loc, machine_mode mode)
5977 rtx addr, mem;
5978 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5979 ? TREE_TYPE (TREE_TYPE (loc))
5980 : TREE_TYPE (loc));
5981 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5983 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5984 addr = convert_memory_address (addr_mode, addr);
5986 /* Note that we explicitly do not want any alias information for this
5987 memory, so that we kill all other live memories. Otherwise we don't
5988 satisfy the full barrier semantics of the intrinsic. */
5989 mem = gen_rtx_MEM (mode, addr);
5991 set_mem_addr_space (mem, addr_space);
5993 mem = validize_mem (mem);
5995 /* The alignment needs to be at least that of the mode. */
5996 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5997 get_pointer_alignment (loc)));
5998 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5999 MEM_VOLATILE_P (mem) = 1;
6001 return mem;
6004 /* Make sure an argument is in the right mode.
6005 EXP is the tree argument.
6006 MODE is the mode it should be in. */
6008 static rtx
6009 expand_expr_force_mode (tree exp, machine_mode mode)
6011 rtx val;
6012 machine_mode old_mode;
6014 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6015 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6016 of CONST_INTs, where we know the old_mode only from the call argument. */
6018 old_mode = GET_MODE (val);
6019 if (old_mode == VOIDmode)
6020 old_mode = TYPE_MODE (TREE_TYPE (exp));
6021 val = convert_modes (mode, old_mode, val, 1);
6022 return val;
6026 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6027 EXP is the CALL_EXPR. CODE is the rtx code
6028 that corresponds to the arithmetic or logical operation from the name;
6029 an exception here is that NOT actually means NAND. TARGET is an optional
6030 place for us to store the results; AFTER is true if this is the
6031 fetch_and_xxx form. */
6033 static rtx
6034 expand_builtin_sync_operation (machine_mode mode, tree exp,
6035 enum rtx_code code, bool after,
6036 rtx target)
6038 rtx val, mem;
6039 location_t loc = EXPR_LOCATION (exp);
6041 if (code == NOT && warn_sync_nand)
6043 tree fndecl = get_callee_fndecl (exp);
6044 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6046 static bool warned_f_a_n, warned_n_a_f;
6048 switch (fcode)
6050 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6051 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6052 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6053 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6054 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6055 if (warned_f_a_n)
6056 break;
6058 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6059 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6060 warned_f_a_n = true;
6061 break;
6063 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6064 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6065 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6066 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6067 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6068 if (warned_n_a_f)
6069 break;
6071 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6072 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6073 warned_n_a_f = true;
6074 break;
6076 default:
6077 gcc_unreachable ();
6081 /* Expand the operands. */
6082 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6083 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6085 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6086 after);
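/* The semantic change warned about above, made concrete: before
   GCC 4.4, __sync_fetch_and_nand stored ~*ptr & val; since GCC 4.4 it
   stores ~(*ptr & val), matching the behavior of the corresponding
   __atomic builtins. */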
6089 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6090 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6091 true if this is the boolean form. TARGET is a place for us to store the
6092 results; this is NOT optional if IS_BOOL is true. */
6094 static rtx
6095 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6096 bool is_bool, rtx target)
6098 rtx old_val, new_val, mem;
6099 rtx *pbool, *poval;
6101 /* Expand the operands. */
6102 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6103 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6104 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6106 pbool = poval = NULL;
6107 if (target != const0_rtx)
6109 if (is_bool)
6110 pbool = &target;
6111 else
6112 poval = &target;
6114 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6115 false, MEMMODEL_SYNC_SEQ_CST,
6116 MEMMODEL_SYNC_SEQ_CST))
6117 return NULL_RTX;
6119 return target;
6122 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6123 general form is actually an atomic exchange, and some targets only
6124 support a reduced form with the second argument being a constant 1.
6125 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6126 the results. */
6128 static rtx
6129 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6130 rtx target)
6132 rtx val, mem;
6134 /* Expand the operands. */
6135 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6136 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6138 return expand_sync_lock_test_and_set (target, mem, val);
6141 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6143 static void
6144 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6146 rtx mem;
6148 /* Expand the operands. */
6149 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6151 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6154 /* Given an integer representing an ``enum memmodel'', verify its
6155 correctness and return the memory model enum. */
6157 static enum memmodel
6158 get_memmodel (tree exp)
6160 rtx op;
6161 unsigned HOST_WIDE_INT val;
6162 location_t loc
6163 = expansion_point_location_if_in_system_header (input_location);
6165 /* If the parameter is not a constant, it's a run time value so we'll just
6166 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6167 if (TREE_CODE (exp) != INTEGER_CST)
6168 return MEMMODEL_SEQ_CST;
6170 op = expand_normal (exp);
6172 val = INTVAL (op);
6173 if (targetm.memmodel_check)
6174 val = targetm.memmodel_check (val);
6175 else if (val & ~MEMMODEL_MASK)
6177 warning_at (loc, OPT_Winvalid_memory_model,
6178 "unknown architecture specifier in memory model to builtin");
6179 return MEMMODEL_SEQ_CST;
6182 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
6183 if (memmodel_base (val) >= MEMMODEL_LAST)
6185 warning_at (loc, OPT_Winvalid_memory_model,
6186 "invalid memory model argument to builtin");
6187 return MEMMODEL_SEQ_CST;
6190 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6191 be conservative and promote consume to acquire. */
6192 if (val == MEMMODEL_CONSUME)
6193 val = MEMMODEL_ACQUIRE;
6195 return (enum memmodel) val;
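/* Example (illustrative): __atomic_load_n (p, __ATOMIC_CONSUME)
   arrives here with an INTEGER_CST of value 1 (memory_order_consume),
   which the workaround above promotes to MEMMODEL_ACQUIRE. */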
6198 /* Expand the __atomic_exchange intrinsic:
6199 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6200 EXP is the CALL_EXPR.
6201 TARGET is an optional place for us to store the results. */
6203 static rtx
6204 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6206 rtx val, mem;
6207 enum memmodel model;
6209 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6211 if (!flag_inline_atomics)
6212 return NULL_RTX;
6214 /* Expand the operands. */
6215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6216 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6218 return expand_atomic_exchange (target, mem, val, model);
6221 /* Expand the __atomic_compare_exchange intrinsic:
6222 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6223 TYPE desired, BOOL weak,
6224 enum memmodel success,
6225 enum memmodel failure)
6226 EXP is the CALL_EXPR.
6227 TARGET is an optional place for us to store the results. */
6229 static rtx
6230 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6231 rtx target)
6233 rtx expect, desired, mem, oldval;
6234 rtx_code_label *label;
6235 enum memmodel success, failure;
6236 tree weak;
6237 bool is_weak;
6238 location_t loc
6239 = expansion_point_location_if_in_system_header (input_location);
6241 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6242 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6244 if (failure > success)
6246 warning_at (loc, OPT_Winvalid_memory_model,
6247 "failure memory model cannot be stronger than success "
6248 "memory model for %<__atomic_compare_exchange%>");
6249 success = MEMMODEL_SEQ_CST;
6252 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6254 warning_at (loc, OPT_Winvalid_memory_model,
6255 "invalid failure memory model for "
6256 "%<__atomic_compare_exchange%>");
6257 failure = MEMMODEL_SEQ_CST;
6258 success = MEMMODEL_SEQ_CST;
6262 if (!flag_inline_atomics)
6263 return NULL_RTX;
6265 /* Expand the operands. */
6266 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6268 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6269 expect = convert_memory_address (Pmode, expect);
6270 expect = gen_rtx_MEM (mode, expect);
6271 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6273 weak = CALL_EXPR_ARG (exp, 3);
6274 is_weak = false;
6275 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6276 is_weak = true;
6278 if (target == const0_rtx)
6279 target = NULL;
6281 /* Lest the rtl backend create a race condition with an improper store
6282 to memory, always create a new pseudo for OLDVAL. */
6283 oldval = NULL;
6285 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6286 is_weak, success, failure))
6287 return NULL_RTX;
6289 /* Conditionally store back to EXPECT, lest we create a race condition
6290 with an improper store to memory. */
6291 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6292 the normal case where EXPECT is totally private, i.e. a register. At
6293 which point the store can be unconditional. */
6294 label = gen_label_rtx ();
6295 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6296 GET_MODE (target), 1, label);
6297 emit_move_insn (expect, oldval);
6298 emit_label (label);
6300 return target;
6303 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6304 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6305 call. The weak parameter must be dropped to match the expected parameter
6306 list and the expected argument changed from value to pointer to memory
6307 slot. */
6309 static void
6310 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6312 unsigned int z;
6313 vec<tree, va_gc> *vec;
6315 vec_alloc (vec, 5);
6316 vec->quick_push (gimple_call_arg (call, 0));
6317 tree expected = gimple_call_arg (call, 1);
6318 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6319 TREE_TYPE (expected));
6320 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6321 if (expd != x)
6322 emit_move_insn (x, expd);
6323 tree v = make_tree (TREE_TYPE (expected), x);
6324 vec->quick_push (build1 (ADDR_EXPR,
6325 build_pointer_type (TREE_TYPE (expected)), v));
6326 vec->quick_push (gimple_call_arg (call, 2));
6327 /* Skip the boolean weak parameter. */
6328 for (z = 4; z < 6; z++)
6329 vec->quick_push (gimple_call_arg (call, z));
6330 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6331 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6332 gcc_assert (bytes_log2 < 5);
6333 built_in_function fncode
6334 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6335 + bytes_log2);
6336 tree fndecl = builtin_decl_explicit (fncode);
6337 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6338 fndecl);
6339 tree exp = build_call_vec (boolean_type_node, fn, vec);
6340 tree lhs = gimple_call_lhs (call);
6341 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6342 if (lhs)
6344 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6345 if (GET_MODE (boolret) != mode)
6346 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6347 x = force_reg (mode, x);
6348 write_complex_part (target, boolret, true);
6349 write_complex_part (target, x, false);
6353 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6355 void
6356 expand_ifn_atomic_compare_exchange (gcall *call)
6358 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6359 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6360 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6361 rtx expect, desired, mem, oldval, boolret;
6362 enum memmodel success, failure;
6363 tree lhs;
6364 bool is_weak;
6365 location_t loc
6366 = expansion_point_location_if_in_system_header (gimple_location (call));
6368 success = get_memmodel (gimple_call_arg (call, 4));
6369 failure = get_memmodel (gimple_call_arg (call, 5));
6371 if (failure > success)
6373 warning_at (loc, OPT_Winvalid_memory_model,
6374 "failure memory model cannot be stronger than success "
6375 "memory model for %<__atomic_compare_exchange%>");
6376 success = MEMMODEL_SEQ_CST;
6379 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6381 warning_at (loc, OPT_Winvalid_memory_model,
6382 "invalid failure memory model for "
6383 "%<__atomic_compare_exchange%>");
6384 failure = MEMMODEL_SEQ_CST;
6385 success = MEMMODEL_SEQ_CST;
6388 if (!flag_inline_atomics)
6390 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6391 return;
6394 /* Expand the operands. */
6395 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6397 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6398 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6400 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6402 boolret = NULL;
6403 oldval = NULL;
6405 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6406 is_weak, success, failure))
6408 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6409 return;
6412 lhs = gimple_call_lhs (call);
6413 if (lhs)
6415 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6416 if (GET_MODE (boolret) != mode)
6417 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6418 write_complex_part (target, boolret, true);
6419 write_complex_part (target, oldval, false);
6423 /* Expand the __atomic_load intrinsic:
6424 TYPE __atomic_load (TYPE *object, enum memmodel)
6425 EXP is the CALL_EXPR.
6426 TARGET is an optional place for us to store the results. */
6428 static rtx
6429 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6431 rtx mem;
6432 enum memmodel model;
6434 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6435 if (is_mm_release (model) || is_mm_acq_rel (model))
6437 location_t loc
6438 = expansion_point_location_if_in_system_header (input_location);
6439 warning_at (loc, OPT_Winvalid_memory_model,
6440 "invalid memory model for %<__atomic_load%>");
6441 model = MEMMODEL_SEQ_CST;
6444 if (!flag_inline_atomics)
6445 return NULL_RTX;
6447 /* Expand the operand. */
6448 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6450 return expand_atomic_load (target, mem, model);
6454 /* Expand the __atomic_store intrinsic:
6455 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6456 EXP is the CALL_EXPR.
6457 TARGET is an optional place for us to store the results. */
6459 static rtx
6460 expand_builtin_atomic_store (machine_mode mode, tree exp)
6462 rtx mem, val;
6463 enum memmodel model;
6465 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6466 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6467 || is_mm_release (model)))
6469 location_t loc
6470 = expansion_point_location_if_in_system_header (input_location);
6471 warning_at (loc, OPT_Winvalid_memory_model,
6472 "invalid memory model for %<__atomic_store%>");
6473 model = MEMMODEL_SEQ_CST;
6476 if (!flag_inline_atomics)
6477 return NULL_RTX;
6479 /* Expand the operands. */
6480 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6481 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6483 return expand_atomic_store (mem, val, model, false);
6486 /* Expand the __atomic_fetch_XXX intrinsic:
6487 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6488 EXP is the CALL_EXPR.
6489 TARGET is an optional place for us to store the results.
6490 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6491 FETCH_AFTER is true if returning the result of the operation.
6492 FETCH_AFTER is false if returning the value before the operation.
6493 IGNORE is true if the result is not used.
6494 EXT_CALL is the correct builtin for an external call if this cannot be
6495 resolved to an instruction sequence. */
6497 static rtx
6498 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6499 enum rtx_code code, bool fetch_after,
6500 bool ignore, enum built_in_function ext_call)
6502 rtx val, mem, ret;
6503 enum memmodel model;
6504 tree fndecl;
6505 tree addr;
6507 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6509 /* Expand the operands. */
6510 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6511 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6513 /* Only try generating instructions if inlining is turned on. */
6514 if (flag_inline_atomics)
6516 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6517 if (ret)
6518 return ret;
6521 /* Return if a different routine isn't needed for the library call. */
6522 if (ext_call == BUILT_IN_NONE)
6523 return NULL_RTX;
6525 /* Change the call to the specified function. */
6526 fndecl = get_callee_fndecl (exp);
6527 addr = CALL_EXPR_FN (exp);
6528 STRIP_NOPS (addr);
6530 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6531 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6533 /* If we will emit code after the call, the call cannot be a tail call.
6534 If it is emitted as a tail call, a barrier is emitted after it, and
6535 then all trailing code is removed. */
6536 if (!ignore)
6537 CALL_EXPR_TAILCALL (exp) = 0;
6539 /* Expand the call here so we can emit trailing code. */
6540 ret = expand_call (exp, target, ignore);
6542 /* Replace the original function just in case it matters. */
6543 TREE_OPERAND (addr, 0) = fndecl;
6545 /* Then issue the arithmetic correction to return the right result. */
6546 if (!ignore)
6548 if (code == NOT)
6550 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6551 OPTAB_LIB_WIDEN);
6552 ret = expand_simple_unop (mode, NOT, ret, target, true);
6554 else
6555 ret = expand_simple_binop (mode, code, ret, val, target, true,
6556 OPTAB_LIB_WIDEN);
6558 return ret;
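/* Correction example (illustrative): __atomic_add_fetch must return
   the post-operation value, while the __atomic_fetch_add library
   routine returns the pre-operation value, so the code above
   re-applies CODE to the library result. For NAND the identity is
   new == ~(old & val), hence the AND followed by NOT. */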
6561 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6563 void
6564 expand_ifn_atomic_bit_test_and (gcall *call)
6566 tree ptr = gimple_call_arg (call, 0);
6567 tree bit = gimple_call_arg (call, 1);
6568 tree flag = gimple_call_arg (call, 2);
6569 tree lhs = gimple_call_lhs (call);
6570 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6571 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6572 enum rtx_code code;
6573 optab optab;
6574 struct expand_operand ops[5];
6576 gcc_assert (flag_inline_atomics);
6578 if (gimple_call_num_args (call) == 4)
6579 model = get_memmodel (gimple_call_arg (call, 3));
6581 rtx mem = get_builtin_sync_mem (ptr, mode);
6582 rtx val = expand_expr_force_mode (bit, mode);
6584 switch (gimple_call_internal_fn (call))
6586 case IFN_ATOMIC_BIT_TEST_AND_SET:
6587 code = IOR;
6588 optab = atomic_bit_test_and_set_optab;
6589 break;
6590 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6591 code = XOR;
6592 optab = atomic_bit_test_and_complement_optab;
6593 break;
6594 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6595 code = AND;
6596 optab = atomic_bit_test_and_reset_optab;
6597 break;
6598 default:
6599 gcc_unreachable ();
6602 if (lhs == NULL_TREE)
6604 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6605 val, NULL_RTX, true, OPTAB_DIRECT);
6606 if (code == AND)
6607 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6608 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6609 return;
6612 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6613 enum insn_code icode = direct_optab_handler (optab, mode);
6614 gcc_assert (icode != CODE_FOR_nothing);
6615 create_output_operand (&ops[0], target, mode);
6616 create_fixed_operand (&ops[1], mem);
6617 create_convert_operand_to (&ops[2], val, mode, true);
6618 create_integer_operand (&ops[3], model);
6619 create_integer_operand (&ops[4], integer_onep (flag));
6620 if (maybe_expand_insn (icode, 5, ops))
6621 return;
6623 rtx bitval = val;
6624 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6625 val, NULL_RTX, true, OPTAB_DIRECT);
6626 rtx maskval = val;
6627 if (code == AND)
6628 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6629 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6630 code, model, false);
6631 if (integer_onep (flag))
6633 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6634 NULL_RTX, true, OPTAB_DIRECT);
6635 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6636 true, OPTAB_DIRECT);
6638 else
6639 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6640 OPTAB_DIRECT);
6641 if (result != target)
6642 emit_move_insn (target, result);
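/* Fallback math (illustrative): once the atomic fetch op returns the
   pre-operation value OLD, the tested bit N is recovered either as
   (OLD >> N) & 1 when only that bit is wanted (FLAG nonzero), or as
   OLD & (1 << N) otherwise; these are the two expand_simple_binop
   sequences above. */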
6645 /* Expand an atomic clear operation.
6646 void _atomic_clear (BOOL *obj, enum memmodel)
6647 EXP is the call expression. */
6649 static rtx
6650 expand_builtin_atomic_clear (tree exp)
6652 machine_mode mode;
6653 rtx mem, ret;
6654 enum memmodel model;
6656 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6657 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6658 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6660 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6662 location_t loc
6663 = expansion_point_location_if_in_system_header (input_location);
6664 warning_at (loc, OPT_Winvalid_memory_model,
6665 "invalid memory model for %<__atomic_store%>");
6666 model = MEMMODEL_SEQ_CST;
6669 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6670 Failing that, a store is issued by __atomic_store. The only way this can
6671 fail is if the bool type is larger than a word size. Unlikely, but
6672 handle it anyway for completeness. Assume a single threaded model since
6673 there is no atomic support in this case, and no barriers are required. */
6674 ret = expand_atomic_store (mem, const0_rtx, model, true);
6675 if (!ret)
6676 emit_move_insn (mem, const0_rtx);
6677 return const0_rtx;
6680 /* Expand an atomic test_and_set operation.
6681 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6682 EXP is the call expression. */
6684 static rtx
6685 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6687 rtx mem;
6688 enum memmodel model;
6689 machine_mode mode;
6691 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6692 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6693 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6695 return expand_atomic_test_and_set (target, mem, model);
6699 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6700 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6702 static tree
6703 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6705 int size;
6706 machine_mode mode;
6707 unsigned int mode_align, type_align;
6709 if (TREE_CODE (arg0) != INTEGER_CST)
6710 return NULL_TREE;
6712 /* We need a corresponding integer mode for the access to be lock-free. */
6713 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6714 if (!int_mode_for_size (size, 0).exists (&mode))
6715 return boolean_false_node;
6717 mode_align = GET_MODE_ALIGNMENT (mode);
6719 if (TREE_CODE (arg1) == INTEGER_CST)
6721 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6723 /* Either this argument is null, or it's a fake pointer encoding
6724 the alignment of the object. */
6725 val = least_bit_hwi (val);
6726 val *= BITS_PER_UNIT;
6728 if (val == 0 || mode_align < val)
6729 type_align = mode_align;
6730 else
6731 type_align = val;
6733 else
6735 tree ttype = TREE_TYPE (arg1);
6737 /* This function is usually invoked and folded immediately by the front
6738 end before anything else has a chance to look at it. The pointer
6739 parameter at this point is usually cast to a void *, so check for that
6740 and look past the cast. */
6741 if (CONVERT_EXPR_P (arg1)
6742 && POINTER_TYPE_P (ttype)
6743 && VOID_TYPE_P (TREE_TYPE (ttype))
6744 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6745 arg1 = TREE_OPERAND (arg1, 0);
6747 ttype = TREE_TYPE (arg1);
6748 gcc_assert (POINTER_TYPE_P (ttype));
6750 /* Get the underlying type of the object. */
6751 ttype = TREE_TYPE (ttype);
6752 type_align = TYPE_ALIGN (ttype);
6755 /* If the object has smaller alignment, the lock free routines cannot
6756 be used. */
6757 if (type_align < mode_align)
6758 return boolean_false_node;
6760 /* Check if a compare_and_swap pattern exists for the mode which represents
6761 the required size. The pattern is not allowed to fail, so the existence
6762 of the pattern indicates support is present. Also require that an
6763 atomic load exists for the required size. */
6764 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6765 return boolean_true_node;
6766 else
6767 return boolean_false_node;
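/* Worked example (illustrative):
   __atomic_always_lock_free (8, (void *) 4) encodes a guaranteed
   4-byte alignment in the fake pointer; least_bit_hwi (4)
   * BITS_PER_UNIT == 32, so type_align is taken as 32 and an 8-byte
   access whose mode requires 64-bit alignment folds to
   boolean_false_node above. */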
6770 /* Return true if the parameters to call EXP represent an object which will
6771 always generate lock free instructions. The first argument represents the
6772 size of the object, and the second parameter is a pointer to the object
6773 itself. If NULL is passed for the object, then the result is based on
6774 typical alignment for an object of the specified size. Otherwise return
6775 false. */
6777 static rtx
6778 expand_builtin_atomic_always_lock_free (tree exp)
6780 tree size;
6781 tree arg0 = CALL_EXPR_ARG (exp, 0);
6782 tree arg1 = CALL_EXPR_ARG (exp, 1);
6784 if (TREE_CODE (arg0) != INTEGER_CST)
6786 error ("non-constant argument 1 to __atomic_always_lock_free");
6787 return const0_rtx;
6790 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6791 if (size == boolean_true_node)
6792 return const1_rtx;
6793 return const0_rtx;
6796 /* Return boolean_true_node if it can be determined that object ARG1 of size
6797 ARG0 is lock free on this architecture, NULL_TREE otherwise. */
6799 static tree
6800 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6802 if (!flag_inline_atomics)
6803 return NULL_TREE;
6805 /* If it isn't always lock free, don't generate a result. */
6806 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6807 return boolean_true_node;
6809 return NULL_TREE;
6812 /* Return true if the parameters to call EXP represent an object which will
6813 always generate lock free instructions. The first argument represents the
6814 size of the object, and the second parameter is a pointer to the object
6815 itself. If NULL is passed for the object, then the result is based on
6816 typical alignment for an object of the specified size. Otherwise return
6817 NULL. */
6819 static rtx
6820 expand_builtin_atomic_is_lock_free (tree exp)
6822 tree size;
6823 tree arg0 = CALL_EXPR_ARG (exp, 0);
6824 tree arg1 = CALL_EXPR_ARG (exp, 1);
6826 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6828 error ("non-integer argument 1 to __atomic_is_lock_free");
6829 return NULL_RTX;
6832 if (!flag_inline_atomics)
6833 return NULL_RTX;
6835 /* If the value is known at compile time, return the RTX for it. */
6836 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6837 if (size == boolean_true_node)
6838 return const1_rtx;
6840 return NULL_RTX;
6843 /* Expand the __atomic_thread_fence intrinsic:
6844 void __atomic_thread_fence (enum memmodel)
6845 EXP is the CALL_EXPR. */
6847 static void
6848 expand_builtin_atomic_thread_fence (tree exp)
6850 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6851 expand_mem_thread_fence (model);
6854 /* Expand the __atomic_signal_fence intrinsic:
6855 void __atomic_signal_fence (enum memmodel)
6856 EXP is the CALL_EXPR. */
6858 static void
6859 expand_builtin_atomic_signal_fence (tree exp)
6861 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6862 expand_mem_signal_fence (model);
6865 /* Expand the __sync_synchronize intrinsic. */
6867 static void
6868 expand_builtin_sync_synchronize (void)
6870 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6873 static rtx
6874 expand_builtin_thread_pointer (tree exp, rtx target)
6876 enum insn_code icode;
6877 if (!validate_arglist (exp, VOID_TYPE))
6878 return const0_rtx;
6879 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6880 if (icode != CODE_FOR_nothing)
6882 struct expand_operand op;
6883 /* If the target is not suitable then create a new target. */
6884 if (target == NULL_RTX
6885 || !REG_P (target)
6886 || GET_MODE (target) != Pmode)
6887 target = gen_reg_rtx (Pmode);
6888 create_output_operand (&op, target, Pmode);
6889 expand_insn (icode, 1, &op);
6890 return target;
6892 error ("__builtin_thread_pointer is not supported on this target");
6893 return const0_rtx;
6896 static void
6897 expand_builtin_set_thread_pointer (tree exp)
6899 enum insn_code icode;
6900 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6901 return;
6902 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6903 if (icode != CODE_FOR_nothing)
6905 struct expand_operand op;
6906 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6907 Pmode, EXPAND_NORMAL);
6908 create_input_operand (&op, val, Pmode);
6909 expand_insn (icode, 1, &op);
6910 return;
6912 error ("__builtin_set_thread_pointer is not supported on this target");
6916 /* Emit code to restore the current value of the stack. */
6918 static void
6919 expand_stack_restore (tree var)
6921 rtx_insn *prev;
6922 rtx sa = expand_normal (var);
6924 sa = convert_memory_address (Pmode, sa);
6926 prev = get_last_insn ();
6927 emit_stack_restore (SAVE_BLOCK, sa);
6929 record_new_stack_level ();
6931 fixup_args_size_notes (prev, get_last_insn (), 0);
6934 /* Emit code to save the current value of the stack. */
6936 static rtx
6937 expand_stack_save (void)
6939 rtx ret = NULL_RTX;
6941 emit_stack_save (SAVE_BLOCK, &ret);
6942 return ret;
6947 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6947 static rtx
6948 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6950 const char *name;
6951 rtx fallback_retval;
6952 rtx_insn *(*gen_fn) (rtx, rtx);
6953 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6955 case BUILT_IN_GOACC_PARLEVEL_ID:
6956 name = "__builtin_goacc_parlevel_id";
6957 fallback_retval = const0_rtx;
6958 gen_fn = targetm.gen_oacc_dim_pos;
6959 break;
6960 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6961 name = "__builtin_goacc_parlevel_size";
6962 fallback_retval = const1_rtx;
6963 gen_fn = targetm.gen_oacc_dim_size;
6964 break;
6965 default:
6966 gcc_unreachable ();
6969 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6971 error ("%qs only supported in OpenACC code", name);
6972 return const0_rtx;
6975 tree arg = CALL_EXPR_ARG (exp, 0);
6976 if (TREE_CODE (arg) != INTEGER_CST)
6978 error ("non-constant argument 0 to %qs", name);
6979 return const0_rtx;
6982 int dim = TREE_INT_CST_LOW (arg);
6983 switch (dim)
6985 case GOMP_DIM_GANG:
6986 case GOMP_DIM_WORKER:
6987 case GOMP_DIM_VECTOR:
6988 break;
6989 default:
6990 error ("illegal argument 0 to %qs", name);
6991 return const0_rtx;
6994 if (ignore)
6995 return target;
6997 if (target == NULL_RTX)
6998 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7000 if (!targetm.have_oacc_dim_size ())
7002 emit_move_insn (target, fallback_retval);
7003 return target;
7006 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7007 emit_insn (gen_fn (reg, GEN_INT (dim)));
7008 if (reg != target)
7009 emit_move_insn (target, reg);
7011 return target;
7014 /* Expand a string compare operation using a sequence of char comparisons
7015 to get rid of the calling overhead, with result going to TARGET if
7016 that's convenient.
7018 VAR_STR is the variable string source;
7019 CONST_STR is the constant string source;
7020 LENGTH is the number of chars to compare;
7021 CONST_STR_N indicates which source string is the constant string;
7022 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7024 Expands to (assuming const_str_n is 2, i.e., arg2 is a constant string):
7026 target = (int) (unsigned char) var_str[0]
7027 - (int) (unsigned char) const_str[0];
7028 if (target != 0)
7029 goto ne_label;
7031 target = (int) (unsigned char) var_str[length - 2]
7032 - (int) (unsigned char) const_str[length - 2];
7033 if (target != 0)
7034 goto ne_label;
7035 target = (int) (unsigned char) var_str[length - 1]
7036 - (int) (unsigned char) const_str[length - 1];
7037 ne_label:
7040 static rtx
7041 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7042 unsigned HOST_WIDE_INT length,
7043 int const_str_n, machine_mode mode)
7045 HOST_WIDE_INT offset = 0;
7046 rtx var_rtx_array
7047 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7048 rtx var_rtx = NULL_RTX;
7049 rtx const_rtx = NULL_RTX;
7050 rtx result = target ? target : gen_reg_rtx (mode);
7051 rtx_code_label *ne_label = gen_label_rtx ();
7052 tree unit_type_node = unsigned_char_type_node;
7053 scalar_int_mode unit_mode
7054 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7056 start_sequence ();
7058 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7060 var_rtx
7061 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7062 const_rtx = c_readstr (const_str + offset, unit_mode);
7063 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7064 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7066 op0 = convert_modes (mode, unit_mode, op0, 1);
7067 op1 = convert_modes (mode, unit_mode, op1, 1);
7068 result = expand_simple_binop (mode, MINUS, op0, op1,
7069 result, 1, OPTAB_WIDEN);
7070 if (i < length - 1)
7071 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7072 mode, true, ne_label);
7073 offset += GET_MODE_SIZE (unit_mode);
7076 emit_label (ne_label);
7077 rtx_insn *insns = get_insns ();
7078 end_sequence ();
7079 emit_insn (insns);
7081 return result;
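/* Illustrative example (an editorial sketch, not part of the original file):
   for a call such as

     int cmp (const char *p) { return memcmp (p, "ab", 2); }

   with CONST_STR_N == 2 and LENGTH == 2, the sequence generated above is
   equivalent to

     result = (int) (unsigned char) p[0] - (int) (unsigned char) 'a';
     if (result != 0) goto ne_label;
     result = (int) (unsigned char) p[1] - (int) (unsigned char) 'b';
   ne_label:
     return result;

   The constant bytes come from c_readstr, so only the variable string is
   actually loaded from memory.  */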
7084 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7085 going to TARGET if that's convenient.
7086 If the call is not inlined, return NULL_RTX. */
7087 static rtx
7088 inline_expand_builtin_string_cmp (tree exp, rtx target)
7090 tree fndecl = get_callee_fndecl (exp);
7091 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7092 unsigned HOST_WIDE_INT length = 0;
7093 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7095 /* Do NOT apply this inlining expansion when optimizing for size or
7096 when the optimization level is below 2. */
7097 if (optimize < 2 || optimize_insn_for_size_p ())
7098 return NULL_RTX;
7100 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7101 || fcode == BUILT_IN_STRNCMP
7102 || fcode == BUILT_IN_MEMCMP);
7104 /* On a target where the type of the call (int) has the same or narrower
7105 precision than unsigned char, give up on the inlining expansion. */
7106 if (TYPE_PRECISION (unsigned_char_type_node)
7107 >= TYPE_PRECISION (TREE_TYPE (exp)))
7108 return NULL_RTX;
7110 tree arg1 = CALL_EXPR_ARG (exp, 0);
7111 tree arg2 = CALL_EXPR_ARG (exp, 1);
7112 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7114 unsigned HOST_WIDE_INT len1 = 0;
7115 unsigned HOST_WIDE_INT len2 = 0;
7116 unsigned HOST_WIDE_INT len3 = 0;
7118 const char *src_str1 = c_getstr (arg1, &len1);
7119 const char *src_str2 = c_getstr (arg2, &len2);
7121 /* If neither string is a constant string, the call does not qualify. */
7122 if (!src_str1 && !src_str2)
7123 return NULL_RTX;
7125 /* For strncmp, if the length is not a constant, the call does not qualify. */
7126 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7127 return NULL_RTX;
7129 int const_str_n = 0;
7130 if (!len1)
7131 const_str_n = 2;
7132 else if (!len2)
7133 const_str_n = 1;
7134 else if (len2 > len1)
7135 const_str_n = 1;
7136 else
7137 const_str_n = 2;
7139 gcc_checking_assert (const_str_n > 0);
7140 length = (const_str_n == 1) ? len1 : len2;
7142 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7143 length = len3;
7145 /* If the length of the comparison is larger than the threshold,
7146 do nothing. */
7147 if (length > (unsigned HOST_WIDE_INT)
7148 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7149 return NULL_RTX;
7151 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7153 /* Now, expand the call inline. */
7154 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7155 (const_str_n == 1) ? src_str1 : src_str2, length,
7156 const_str_n, mode);
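/* Illustrative example (an editorial sketch, not part of the original file):
   under the checks above, compiled at -O2:

     strcmp (s, "abc")      // qualifies: one argument is a constant string
     strncmp (s, "abc", n)  // rejected: length is not a constant
     memcmp (p, q, 4)       // rejected: neither argument is constant

   and even a qualifying call falls back to the library once LENGTH
   exceeds --param builtin-string-cmp-inline-length.  */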
7159 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7160 represents the size of the first argument to that call, or VOIDmode
7161 if the argument is a pointer. IGNORE will be true if the result
7162 isn't used. */
7163 static rtx
7164 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7165 bool ignore)
7167 rtx val, failsafe;
7168 unsigned nargs = call_expr_nargs (exp);
7170 tree arg0 = CALL_EXPR_ARG (exp, 0);
7172 if (mode == VOIDmode)
7174 mode = TYPE_MODE (TREE_TYPE (arg0));
7175 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7178 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7180 /* An optional second argument can be used as a failsafe value on
7181 some machines. If it isn't present, then the failsafe value is
7182 assumed to be 0. */
7183 if (nargs > 1)
7185 tree arg1 = CALL_EXPR_ARG (exp, 1);
7186 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7188 else
7189 failsafe = const0_rtx;
7191 /* If the result isn't used, the behavior is undefined. It would be
7192 nice to emit a warning here, but path splitting means this might
7193 happen with legitimate code. So simply drop the builtin
7194 expansion in that case; we've handled any side-effects above. */
7195 if (ignore)
7196 return const0_rtx;
7198 /* If we don't have a suitable target, create one to hold the result. */
7199 if (target == NULL || GET_MODE (target) != mode)
7200 target = gen_reg_rtx (mode);
7202 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7203 val = convert_modes (mode, VOIDmode, val, false);
7205 return targetm.speculation_safe_value (mode, target, val, failsafe);
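/* Illustrative example (an editorial sketch, not part of the original file):
   __builtin_speculation_safe_value is the user-level defense against
   Spectre-variant-1 style attacks.  A bounds-checked access might be
   hardened like this:

     int
     load_checked (int *arr, unsigned idx, unsigned bound)
     {
       if (idx < bound)
	 // With no second argument, the failsafe value is 0, so on
	 // targets implementing targetm.speculation_safe_value the
	 // index collapses to 0 along misspeculated paths.
	 return arr[__builtin_speculation_safe_value (idx)];
       return 0;
     }
*/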
7208 /* Expand an expression EXP that calls a built-in function,
7209 with result going to TARGET if that's convenient
7210 (and in mode MODE if that's convenient).
7211 SUBTARGET may be used as the target for computing one of EXP's operands.
7212 IGNORE is nonzero if the value is to be ignored. */
7215 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7216 int ignore)
7218 tree fndecl = get_callee_fndecl (exp);
7219 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7220 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7221 int flags;
7223 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7224 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7226 /* When ASan is enabled, we don't want to expand some memory/string
7227 builtins and instead rely on libsanitizer's hooks. This allows us to
7228 avoid redundant checks and be sure that a possible overflow will be
7229 detected by ASan. */
7231 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7232 return expand_call (exp, target, ignore);
7234 /* When not optimizing, generate calls to library functions for a certain
7235 set of builtins. */
7236 if (!optimize
7237 && !called_as_built_in (fndecl)
7238 && fcode != BUILT_IN_FORK
7239 && fcode != BUILT_IN_EXECL
7240 && fcode != BUILT_IN_EXECV
7241 && fcode != BUILT_IN_EXECLP
7242 && fcode != BUILT_IN_EXECLE
7243 && fcode != BUILT_IN_EXECVP
7244 && fcode != BUILT_IN_EXECVE
7245 && !ALLOCA_FUNCTION_CODE_P (fcode)
7246 && fcode != BUILT_IN_FREE)
7247 return expand_call (exp, target, ignore);
7249 /* The built-in function expanders test for target == const0_rtx
7250 to determine whether the function's result will be ignored. */
7251 if (ignore)
7252 target = const0_rtx;
7254 /* If the result of a pure or const built-in function is ignored, and
7255 none of its arguments are volatile, we can avoid expanding the
7256 built-in call and just evaluate the arguments for side-effects. */
7257 if (target == const0_rtx
7258 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7259 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7261 bool volatilep = false;
7262 tree arg;
7263 call_expr_arg_iterator iter;
7265 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7266 if (TREE_THIS_VOLATILE (arg))
7268 volatilep = true;
7269 break;
7272 if (! volatilep)
7274 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7275 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7276 return const0_rtx;
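/* Illustrative example (an editorial sketch, not part of the original file):
   given a const builtin whose result is ignored, e.g.

     (void) __builtin_labs (x = y);

   the block above expands only the side effect x = y; no code for the
   labs computation itself is emitted.  */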
7280 switch (fcode)
7282 CASE_FLT_FN (BUILT_IN_FABS):
7283 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7284 case BUILT_IN_FABSD32:
7285 case BUILT_IN_FABSD64:
7286 case BUILT_IN_FABSD128:
7287 target = expand_builtin_fabs (exp, target, subtarget);
7288 if (target)
7289 return target;
7290 break;
7292 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7293 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7294 target = expand_builtin_copysign (exp, target, subtarget);
7295 if (target)
7296 return target;
7297 break;
7299 /* Just do a normal library call if we were unable to fold
7300 the values. */
7301 CASE_FLT_FN (BUILT_IN_CABS):
7302 break;
7304 CASE_FLT_FN (BUILT_IN_FMA):
7305 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7306 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7307 if (target)
7308 return target;
7309 break;
7311 CASE_FLT_FN (BUILT_IN_ILOGB):
7312 if (! flag_unsafe_math_optimizations)
7313 break;
7314 gcc_fallthrough ();
7315 CASE_FLT_FN (BUILT_IN_ISINF):
7316 CASE_FLT_FN (BUILT_IN_FINITE):
7317 case BUILT_IN_ISFINITE:
7318 case BUILT_IN_ISNORMAL:
7319 target = expand_builtin_interclass_mathfn (exp, target);
7320 if (target)
7321 return target;
7322 break;
7324 CASE_FLT_FN (BUILT_IN_ICEIL):
7325 CASE_FLT_FN (BUILT_IN_LCEIL):
7326 CASE_FLT_FN (BUILT_IN_LLCEIL):
7327 CASE_FLT_FN (BUILT_IN_LFLOOR):
7328 CASE_FLT_FN (BUILT_IN_IFLOOR):
7329 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7330 target = expand_builtin_int_roundingfn (exp, target);
7331 if (target)
7332 return target;
7333 break;
7335 CASE_FLT_FN (BUILT_IN_IRINT):
7336 CASE_FLT_FN (BUILT_IN_LRINT):
7337 CASE_FLT_FN (BUILT_IN_LLRINT):
7338 CASE_FLT_FN (BUILT_IN_IROUND):
7339 CASE_FLT_FN (BUILT_IN_LROUND):
7340 CASE_FLT_FN (BUILT_IN_LLROUND):
7341 target = expand_builtin_int_roundingfn_2 (exp, target);
7342 if (target)
7343 return target;
7344 break;
7346 CASE_FLT_FN (BUILT_IN_POWI):
7347 target = expand_builtin_powi (exp, target);
7348 if (target)
7349 return target;
7350 break;
7352 CASE_FLT_FN (BUILT_IN_CEXPI):
7353 target = expand_builtin_cexpi (exp, target);
7354 gcc_assert (target);
7355 return target;
7357 CASE_FLT_FN (BUILT_IN_SIN):
7358 CASE_FLT_FN (BUILT_IN_COS):
7359 if (! flag_unsafe_math_optimizations)
7360 break;
7361 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7362 if (target)
7363 return target;
7364 break;
7366 CASE_FLT_FN (BUILT_IN_SINCOS):
7367 if (! flag_unsafe_math_optimizations)
7368 break;
7369 target = expand_builtin_sincos (exp);
7370 if (target)
7371 return target;
7372 break;
7374 case BUILT_IN_APPLY_ARGS:
7375 return expand_builtin_apply_args ();
7377 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7378 FUNCTION with a copy of the parameters described by
7379 ARGUMENTS, and ARGSIZE. It returns a block of memory
7380 allocated on the stack into which is stored all the registers
7381 that might possibly be used for returning the result of a
7382 function. ARGUMENTS is the value returned by
7383 __builtin_apply_args. ARGSIZE is the number of bytes of
7384 arguments that must be copied. ??? How should this value be
7385 computed? We'll also need a safe worst case value for varargs
7386 functions. */
7387 case BUILT_IN_APPLY:
7388 if (!validate_arglist (exp, POINTER_TYPE,
7389 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7390 && !validate_arglist (exp, REFERENCE_TYPE,
7391 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7392 return const0_rtx;
7393 else
7395 rtx ops[3];
7397 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7398 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7399 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7401 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7404 /* __builtin_return (RESULT) causes the function to return the
7405 value described by RESULT. RESULT is address of the block of
7406 memory returned by __builtin_apply. */
7407 case BUILT_IN_RETURN:
7408 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7409 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7410 return const0_rtx;
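/* Illustrative example (an editorial sketch, not part of the original file):
   the three builtins above cooperate in the classic call-forwarding idiom:

     long target_fn (int a, int b);

     long
     wrapper (int a, int b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   Here 64 is the caller's guess at ARGSIZE, the number of bytes of
   argument data to copy; as the ??? note above says, there is no
   portable way to compute it exactly.  */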
7412 case BUILT_IN_SAVEREGS:
7413 return expand_builtin_saveregs ();
7415 case BUILT_IN_VA_ARG_PACK:
7416 /* All valid uses of __builtin_va_arg_pack () are removed during
7417 inlining. */
7418 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7419 return const0_rtx;
7421 case BUILT_IN_VA_ARG_PACK_LEN:
7422 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7423 inlining. */
7424 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7425 return const0_rtx;
7427 /* Return the address of the first anonymous stack arg. */
7428 case BUILT_IN_NEXT_ARG:
7429 if (fold_builtin_next_arg (exp, false))
7430 return const0_rtx;
7431 return expand_builtin_next_arg ();
7433 case BUILT_IN_CLEAR_CACHE:
7434 target = expand_builtin___clear_cache (exp);
7435 if (target)
7436 return target;
7437 break;
7439 case BUILT_IN_CLASSIFY_TYPE:
7440 return expand_builtin_classify_type (exp);
7442 case BUILT_IN_CONSTANT_P:
7443 return const0_rtx;
7445 case BUILT_IN_FRAME_ADDRESS:
7446 case BUILT_IN_RETURN_ADDRESS:
7447 return expand_builtin_frame_address (fndecl, exp);
7449 /* Returns the address of the area where the structure value is
7450 returned, or 0 otherwise. */
7451 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7452 if (call_expr_nargs (exp) != 0
7453 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7454 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7455 return const0_rtx;
7456 else
7457 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7459 CASE_BUILT_IN_ALLOCA:
7460 target = expand_builtin_alloca (exp);
7461 if (target)
7462 return target;
7463 break;
7465 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7466 return expand_asan_emit_allocas_unpoison (exp);
7468 case BUILT_IN_STACK_SAVE:
7469 return expand_stack_save ();
7471 case BUILT_IN_STACK_RESTORE:
7472 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7473 return const0_rtx;
7475 case BUILT_IN_BSWAP16:
7476 case BUILT_IN_BSWAP32:
7477 case BUILT_IN_BSWAP64:
7478 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7479 if (target)
7480 return target;
7481 break;
7483 CASE_INT_FN (BUILT_IN_FFS):
7484 target = expand_builtin_unop (target_mode, exp, target,
7485 subtarget, ffs_optab);
7486 if (target)
7487 return target;
7488 break;
7490 CASE_INT_FN (BUILT_IN_CLZ):
7491 target = expand_builtin_unop (target_mode, exp, target,
7492 subtarget, clz_optab);
7493 if (target)
7494 return target;
7495 break;
7497 CASE_INT_FN (BUILT_IN_CTZ):
7498 target = expand_builtin_unop (target_mode, exp, target,
7499 subtarget, ctz_optab);
7500 if (target)
7501 return target;
7502 break;
7504 CASE_INT_FN (BUILT_IN_CLRSB):
7505 target = expand_builtin_unop (target_mode, exp, target,
7506 subtarget, clrsb_optab);
7507 if (target)
7508 return target;
7509 break;
7511 CASE_INT_FN (BUILT_IN_POPCOUNT):
7512 target = expand_builtin_unop (target_mode, exp, target,
7513 subtarget, popcount_optab);
7514 if (target)
7515 return target;
7516 break;
7518 CASE_INT_FN (BUILT_IN_PARITY):
7519 target = expand_builtin_unop (target_mode, exp, target,
7520 subtarget, parity_optab);
7521 if (target)
7522 return target;
7523 break;
7525 case BUILT_IN_STRLEN:
7526 target = expand_builtin_strlen (exp, target, target_mode);
7527 if (target)
7528 return target;
7529 break;
7531 case BUILT_IN_STRNLEN:
7532 target = expand_builtin_strnlen (exp, target, target_mode);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_STRCAT:
7538 target = expand_builtin_strcat (exp, target);
7539 if (target)
7540 return target;
7541 break;
7543 case BUILT_IN_STRCPY:
7544 target = expand_builtin_strcpy (exp, target);
7545 if (target)
7546 return target;
7547 break;
7549 case BUILT_IN_STRNCAT:
7550 target = expand_builtin_strncat (exp, target);
7551 if (target)
7552 return target;
7553 break;
7555 case BUILT_IN_STRNCPY:
7556 target = expand_builtin_strncpy (exp, target);
7557 if (target)
7558 return target;
7559 break;
7561 case BUILT_IN_STPCPY:
7562 target = expand_builtin_stpcpy (exp, target, mode);
7563 if (target)
7564 return target;
7565 break;
7567 case BUILT_IN_STPNCPY:
7568 target = expand_builtin_stpncpy (exp, target);
7569 if (target)
7570 return target;
7571 break;
7573 case BUILT_IN_MEMCHR:
7574 target = expand_builtin_memchr (exp, target);
7575 if (target)
7576 return target;
7577 break;
7579 case BUILT_IN_MEMCPY:
7580 target = expand_builtin_memcpy (exp, target);
7581 if (target)
7582 return target;
7583 break;
7585 case BUILT_IN_MEMMOVE:
7586 target = expand_builtin_memmove (exp, target);
7587 if (target)
7588 return target;
7589 break;
7591 case BUILT_IN_MEMPCPY:
7592 target = expand_builtin_mempcpy (exp, target);
7593 if (target)
7594 return target;
7595 break;
7597 case BUILT_IN_MEMSET:
7598 target = expand_builtin_memset (exp, target, mode);
7599 if (target)
7600 return target;
7601 break;
7603 case BUILT_IN_BZERO:
7604 target = expand_builtin_bzero (exp);
7605 if (target)
7606 return target;
7607 break;
7609 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7610 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7611 when changing it to a strcmp call. */
7612 case BUILT_IN_STRCMP_EQ:
7613 target = expand_builtin_memcmp (exp, target, true);
7614 if (target)
7615 return target;
7617 /* Change this call back to a BUILT_IN_STRCMP. */
7618 TREE_OPERAND (exp, 1)
7619 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7621 /* Delete the last parameter. */
7622 unsigned int i;
7623 vec<tree, va_gc> *arg_vec;
7624 vec_alloc (arg_vec, 2);
7625 for (i = 0; i < 2; i++)
7626 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7627 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7628 /* FALLTHROUGH */
7630 case BUILT_IN_STRCMP:
7631 target = expand_builtin_strcmp (exp, target);
7632 if (target)
7633 return target;
7634 break;
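/* Illustrative note (an editorial sketch, not part of the original file):
   BUILT_IN_STRCMP_EQ is an internal variant created when only the
   equality of the result matters, e.g. for

     if (strcmp (s, "abc") == 0)

   and it carries a third argument with the known size bound.  If the
   memcmp-style expansion above fails, the CALL_EXPR is rebuilt with two
   arguments so that the eventual library call is a plain strcmp.  */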
7636 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7637 back to a BUILT_IN_STRNCMP. */
7638 case BUILT_IN_STRNCMP_EQ:
7639 target = expand_builtin_memcmp (exp, target, true);
7640 if (target)
7641 return target;
7643 /* Change it back to a BUILT_IN_STRNCMP. */
7644 TREE_OPERAND (exp, 1)
7645 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7646 /* FALLTHROUGH */
7648 case BUILT_IN_STRNCMP:
7649 target = expand_builtin_strncmp (exp, target, mode);
7650 if (target)
7651 return target;
7652 break;
7654 case BUILT_IN_BCMP:
7655 case BUILT_IN_MEMCMP:
7656 case BUILT_IN_MEMCMP_EQ:
7657 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7658 if (target)
7659 return target;
7660 if (fcode == BUILT_IN_MEMCMP_EQ)
7662 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7663 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7665 break;
7667 case BUILT_IN_SETJMP:
7668 /* This should have been lowered to the builtins below. */
7669 gcc_unreachable ();
7671 case BUILT_IN_SETJMP_SETUP:
7672 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7673 and the receiver label. */
7674 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7676 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7677 VOIDmode, EXPAND_NORMAL);
7678 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7679 rtx_insn *label_r = label_rtx (label);
7681 /* This is copied from the handling of non-local gotos. */
7682 expand_builtin_setjmp_setup (buf_addr, label_r);
7683 nonlocal_goto_handler_labels
7684 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7685 nonlocal_goto_handler_labels);
7686 /* ??? Do not let expand_label treat us as such since we would
7687 not want to be both on the list of non-local labels and on
7688 the list of forced labels. */
7689 FORCED_LABEL (label) = 0;
7690 return const0_rtx;
7692 break;
7694 case BUILT_IN_SETJMP_RECEIVER:
7695 /* __builtin_setjmp_receiver is passed the receiver label. */
7696 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7698 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7699 rtx_insn *label_r = label_rtx (label);
7701 expand_builtin_setjmp_receiver (label_r);
7702 return const0_rtx;
7704 break;
7706 /* __builtin_longjmp is passed a pointer to an array of five words.
7707 It's similar to the C library longjmp function but works with
7708 __builtin_setjmp above. */
7709 case BUILT_IN_LONGJMP:
7710 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7712 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7713 VOIDmode, EXPAND_NORMAL);
7714 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7716 if (value != const1_rtx)
7718 error ("%<__builtin_longjmp%> second argument must be 1");
7719 return const0_rtx;
7722 expand_builtin_longjmp (buf_addr, value);
7723 return const0_rtx;
7725 break;
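/* Illustrative example (an editorial sketch, not part of the original file):
   the __builtin_setjmp/__builtin_longjmp pair uses a fixed five-word
   buffer and, as enforced above, a jump value that must be literally 1:

     #include <stdint.h>

     intptr_t buf[5];

     void jump_back (void) { __builtin_longjmp (buf, 1); }

     int
     guard (void)
     {
       if (__builtin_setjmp (buf))
	 return 1;		// reached via jump_back
       jump_back ();
       return 0;
     }
*/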
7727 case BUILT_IN_NONLOCAL_GOTO:
7728 target = expand_builtin_nonlocal_goto (exp);
7729 if (target)
7730 return target;
7731 break;
7733 /* This updates the setjmp buffer that is its argument with the value
7734 of the current stack pointer. */
7735 case BUILT_IN_UPDATE_SETJMP_BUF:
7736 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7738 rtx buf_addr
7739 = expand_normal (CALL_EXPR_ARG (exp, 0));
7741 expand_builtin_update_setjmp_buf (buf_addr);
7742 return const0_rtx;
7744 break;
7746 case BUILT_IN_TRAP:
7747 expand_builtin_trap ();
7748 return const0_rtx;
7750 case BUILT_IN_UNREACHABLE:
7751 expand_builtin_unreachable ();
7752 return const0_rtx;
7754 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7755 case BUILT_IN_SIGNBITD32:
7756 case BUILT_IN_SIGNBITD64:
7757 case BUILT_IN_SIGNBITD128:
7758 target = expand_builtin_signbit (exp, target);
7759 if (target)
7760 return target;
7761 break;
7763 /* Various hooks for the DWARF 2 __throw routine. */
7764 case BUILT_IN_UNWIND_INIT:
7765 expand_builtin_unwind_init ();
7766 return const0_rtx;
7767 case BUILT_IN_DWARF_CFA:
7768 return virtual_cfa_rtx;
7769 #ifdef DWARF2_UNWIND_INFO
7770 case BUILT_IN_DWARF_SP_COLUMN:
7771 return expand_builtin_dwarf_sp_column ();
7772 case BUILT_IN_INIT_DWARF_REG_SIZES:
7773 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7774 return const0_rtx;
7775 #endif
7776 case BUILT_IN_FROB_RETURN_ADDR:
7777 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7778 case BUILT_IN_EXTRACT_RETURN_ADDR:
7779 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7780 case BUILT_IN_EH_RETURN:
7781 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7782 CALL_EXPR_ARG (exp, 1));
7783 return const0_rtx;
7784 case BUILT_IN_EH_RETURN_DATA_REGNO:
7785 return expand_builtin_eh_return_data_regno (exp);
7786 case BUILT_IN_EXTEND_POINTER:
7787 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7788 case BUILT_IN_EH_POINTER:
7789 return expand_builtin_eh_pointer (exp);
7790 case BUILT_IN_EH_FILTER:
7791 return expand_builtin_eh_filter (exp);
7792 case BUILT_IN_EH_COPY_VALUES:
7793 return expand_builtin_eh_copy_values (exp);
7795 case BUILT_IN_VA_START:
7796 return expand_builtin_va_start (exp);
7797 case BUILT_IN_VA_END:
7798 return expand_builtin_va_end (exp);
7799 case BUILT_IN_VA_COPY:
7800 return expand_builtin_va_copy (exp);
7801 case BUILT_IN_EXPECT:
7802 return expand_builtin_expect (exp, target);
7803 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7804 return expand_builtin_expect_with_probability (exp, target);
7805 case BUILT_IN_ASSUME_ALIGNED:
7806 return expand_builtin_assume_aligned (exp, target);
7807 case BUILT_IN_PREFETCH:
7808 expand_builtin_prefetch (exp);
7809 return const0_rtx;
7811 case BUILT_IN_INIT_TRAMPOLINE:
7812 return expand_builtin_init_trampoline (exp, true);
7813 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7814 return expand_builtin_init_trampoline (exp, false);
7815 case BUILT_IN_ADJUST_TRAMPOLINE:
7816 return expand_builtin_adjust_trampoline (exp);
7818 case BUILT_IN_INIT_DESCRIPTOR:
7819 return expand_builtin_init_descriptor (exp);
7820 case BUILT_IN_ADJUST_DESCRIPTOR:
7821 return expand_builtin_adjust_descriptor (exp);
7823 case BUILT_IN_FORK:
7824 case BUILT_IN_EXECL:
7825 case BUILT_IN_EXECV:
7826 case BUILT_IN_EXECLP:
7827 case BUILT_IN_EXECLE:
7828 case BUILT_IN_EXECVP:
7829 case BUILT_IN_EXECVE:
7830 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7831 if (target)
7832 return target;
7833 break;
7835 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7836 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7837 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7838 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7839 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7840 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7841 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7842 if (target)
7843 return target;
7844 break;
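/* Illustrative example (an editorial sketch, not part of the original file):
   the FALSE passed to expand_builtin_sync_operation above selects
   fetch-then-op semantics (the pre-update value is returned), whereas the
   __sync_*_and_fetch group further down passes TRUE for op-then-fetch:

     int counter = 5;
     int a = __sync_fetch_and_add (&counter, 2);  // a == 5, counter == 7
     int b = __sync_add_and_fetch (&counter, 2);  // b == 9, counter == 9
*/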
7846 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7847 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7848 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7849 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7850 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7851 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7852 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7853 if (target)
7854 return target;
7855 break;
7857 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7858 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7859 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7860 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7861 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7862 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7863 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7864 if (target)
7865 return target;
7866 break;
7868 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7869 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7870 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7871 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7872 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7873 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7874 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7875 if (target)
7876 return target;
7877 break;
7879 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7880 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7881 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7882 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7883 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7884 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7885 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7886 if (target)
7887 return target;
7888 break;
7890 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7891 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7892 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7893 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7894 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7895 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7896 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7897 if (target)
7898 return target;
7899 break;
7901 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7902 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7903 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7904 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7905 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7906 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7907 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7908 if (target)
7909 return target;
7910 break;
7912 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7913 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7914 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7915 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7916 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7917 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7918 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7919 if (target)
7920 return target;
7921 break;
7923 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7924 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7925 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7926 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7927 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7928 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7929 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7930 if (target)
7931 return target;
7932 break;
7934 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7935 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7936 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7937 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7938 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7939 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7940 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7941 if (target)
7942 return target;
7943 break;
7945 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7946 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7947 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7948 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7949 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7950 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7951 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7952 if (target)
7953 return target;
7954 break;
7956 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7957 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7958 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7959 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7960 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7961 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7962 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7963 if (target)
7964 return target;
7965 break;
7967 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7968 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7969 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7970 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7971 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7972 if (mode == VOIDmode)
7973 mode = TYPE_MODE (boolean_type_node);
7974 if (!target || !register_operand (target, mode))
7975 target = gen_reg_rtx (mode);
7977 mode = get_builtin_sync_mode
7978 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7979 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7980 if (target)
7981 return target;
7982 break;
7984 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7985 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7986 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7987 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7988 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7989 mode = get_builtin_sync_mode
7990 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7991 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7992 if (target)
7993 return target;
7994 break;
7996 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7997 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7998 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7999 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8000 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8001 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8002 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8003 if (target)
8004 return target;
8005 break;
8007 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8008 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8009 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8010 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8011 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8012 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8013 expand_builtin_sync_lock_release (mode, exp);
8014 return const0_rtx;
8016 case BUILT_IN_SYNC_SYNCHRONIZE:
8017 expand_builtin_sync_synchronize ();
8018 return const0_rtx;
8020 case BUILT_IN_ATOMIC_EXCHANGE_1:
8021 case BUILT_IN_ATOMIC_EXCHANGE_2:
8022 case BUILT_IN_ATOMIC_EXCHANGE_4:
8023 case BUILT_IN_ATOMIC_EXCHANGE_8:
8024 case BUILT_IN_ATOMIC_EXCHANGE_16:
8025 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8026 target = expand_builtin_atomic_exchange (mode, exp, target);
8027 if (target)
8028 return target;
8029 break;
8031 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8032 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8033 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8034 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8035 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8037 unsigned int nargs, z;
8038 vec<tree, va_gc> *vec;
8040 mode =
8041 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8042 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8043 if (target)
8044 return target;
8046 /* If this is turned into an external library call, the weak parameter
8047 must be dropped to match the expected parameter list. */
8048 nargs = call_expr_nargs (exp);
8049 vec_alloc (vec, nargs - 1);
8050 for (z = 0; z < 3; z++)
8051 vec->quick_push (CALL_EXPR_ARG (exp, z));
8052 /* Skip the boolean weak parameter. */
8053 for (z = 4; z < 6; z++)
8054 vec->quick_push (CALL_EXPR_ARG (exp, z));
8055 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8056 break;
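/* Illustrative note (an editorial sketch, not part of the original file):
   the builtin form takes six arguments,

     ok = __atomic_compare_exchange_n (&val, &expected, desired, true,
				       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   the fourth being the boolean 'weak' flag.  The external library routine
   __atomic_compare_exchange_N takes five (no weak flag), so the loops
   above copy arguments 0-2 and 4-5 into a fresh CALL_EXPR, skipping
   argument 3, before falling through to the generic call expansion.  */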
8059 case BUILT_IN_ATOMIC_LOAD_1:
8060 case BUILT_IN_ATOMIC_LOAD_2:
8061 case BUILT_IN_ATOMIC_LOAD_4:
8062 case BUILT_IN_ATOMIC_LOAD_8:
8063 case BUILT_IN_ATOMIC_LOAD_16:
8064 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8065 target = expand_builtin_atomic_load (mode, exp, target);
8066 if (target)
8067 return target;
8068 break;
8070 case BUILT_IN_ATOMIC_STORE_1:
8071 case BUILT_IN_ATOMIC_STORE_2:
8072 case BUILT_IN_ATOMIC_STORE_4:
8073 case BUILT_IN_ATOMIC_STORE_8:
8074 case BUILT_IN_ATOMIC_STORE_16:
8075 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8076 target = expand_builtin_atomic_store (mode, exp);
8077 if (target)
8078 return const0_rtx;
8079 break;
8081 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8082 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8083 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8084 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8085 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8087 enum built_in_function lib;
8088 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8089 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8090 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8091 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8092 ignore, lib);
8093 if (target)
8094 return target;
8095 break;
8097 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8098 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8099 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8100 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8101 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8103 enum built_in_function lib;
8104 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8105 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8106 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8107 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8108 ignore, lib);
8109 if (target)
8110 return target;
8111 break;
8113 case BUILT_IN_ATOMIC_AND_FETCH_1:
8114 case BUILT_IN_ATOMIC_AND_FETCH_2:
8115 case BUILT_IN_ATOMIC_AND_FETCH_4:
8116 case BUILT_IN_ATOMIC_AND_FETCH_8:
8117 case BUILT_IN_ATOMIC_AND_FETCH_16:
8119 enum built_in_function lib;
8120 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8121 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8122 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8123 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8124 ignore, lib);
8125 if (target)
8126 return target;
8127 break;
8129 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8130 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8131 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8132 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8133 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8135 enum built_in_function lib;
8136 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8137 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8138 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8139 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8140 ignore, lib);
8141 if (target)
8142 return target;
8143 break;
8145 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8146 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8147 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8148 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8149 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8151 enum built_in_function lib;
8152 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8153 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8154 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8155 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8156 ignore, lib);
8157 if (target)
8158 return target;
8159 break;
8161 case BUILT_IN_ATOMIC_OR_FETCH_1:
8162 case BUILT_IN_ATOMIC_OR_FETCH_2:
8163 case BUILT_IN_ATOMIC_OR_FETCH_4:
8164 case BUILT_IN_ATOMIC_OR_FETCH_8:
8165 case BUILT_IN_ATOMIC_OR_FETCH_16:
8167 enum built_in_function lib;
8168 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8169 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8170 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8171 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8172 ignore, lib);
8173 if (target)
8174 return target;
8175 break;
8177 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8178 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8179 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8180 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8181 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8182 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8183 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8184 ignore, BUILT_IN_NONE);
8185 if (target)
8186 return target;
8187 break;
8189 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8190 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8191 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8192 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8193 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8194 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8195 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8196 ignore, BUILT_IN_NONE);
8197 if (target)
8198 return target;
8199 break;
8201 case BUILT_IN_ATOMIC_FETCH_AND_1:
8202 case BUILT_IN_ATOMIC_FETCH_AND_2:
8203 case BUILT_IN_ATOMIC_FETCH_AND_4:
8204 case BUILT_IN_ATOMIC_FETCH_AND_8:
8205 case BUILT_IN_ATOMIC_FETCH_AND_16:
8206 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8207 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8208 ignore, BUILT_IN_NONE);
8209 if (target)
8210 return target;
8211 break;
8213 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8214 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8215 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8216 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8217 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8218 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8219 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8220 ignore, BUILT_IN_NONE);
8221 if (target)
8222 return target;
8223 break;
8225 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8226 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8227 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8228 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8229 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8230 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8231 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8232 ignore, BUILT_IN_NONE);
8233 if (target)
8234 return target;
8235 break;
8237 case BUILT_IN_ATOMIC_FETCH_OR_1:
8238 case BUILT_IN_ATOMIC_FETCH_OR_2:
8239 case BUILT_IN_ATOMIC_FETCH_OR_4:
8240 case BUILT_IN_ATOMIC_FETCH_OR_8:
8241 case BUILT_IN_ATOMIC_FETCH_OR_16:
8242 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8243 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8244 ignore, BUILT_IN_NONE);
8245 if (target)
8246 return target;
8247 break;
8249 case BUILT_IN_ATOMIC_TEST_AND_SET:
8250 return expand_builtin_atomic_test_and_set (exp, target);
8252 case BUILT_IN_ATOMIC_CLEAR:
8253 return expand_builtin_atomic_clear (exp);
8255 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8256 return expand_builtin_atomic_always_lock_free (exp);
8258 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8259 target = expand_builtin_atomic_is_lock_free (exp);
8260 if (target)
8261 return target;
8262 break;
8264 case BUILT_IN_ATOMIC_THREAD_FENCE:
8265 expand_builtin_atomic_thread_fence (exp);
8266 return const0_rtx;
8268 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8269 expand_builtin_atomic_signal_fence (exp);
8270 return const0_rtx;
8272 case BUILT_IN_OBJECT_SIZE:
8273 return expand_builtin_object_size (exp);
8275 case BUILT_IN_MEMCPY_CHK:
8276 case BUILT_IN_MEMPCPY_CHK:
8277 case BUILT_IN_MEMMOVE_CHK:
8278 case BUILT_IN_MEMSET_CHK:
8279 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8280 if (target)
8281 return target;
8282 break;
8284 case BUILT_IN_STRCPY_CHK:
8285 case BUILT_IN_STPCPY_CHK:
8286 case BUILT_IN_STRNCPY_CHK:
8287 case BUILT_IN_STPNCPY_CHK:
8288 case BUILT_IN_STRCAT_CHK:
8289 case BUILT_IN_STRNCAT_CHK:
8290 case BUILT_IN_SNPRINTF_CHK:
8291 case BUILT_IN_VSNPRINTF_CHK:
8292 maybe_emit_chk_warning (exp, fcode);
8293 break;
8295 case BUILT_IN_SPRINTF_CHK:
8296 case BUILT_IN_VSPRINTF_CHK:
8297 maybe_emit_sprintf_chk_warning (exp, fcode);
8298 break;
8300 case BUILT_IN_FREE:
8301 if (warn_free_nonheap_object)
8302 maybe_emit_free_warning (exp);
8303 break;
8305 case BUILT_IN_THREAD_POINTER:
8306 return expand_builtin_thread_pointer (exp, target);
8308 case BUILT_IN_SET_THREAD_POINTER:
8309 expand_builtin_set_thread_pointer (exp);
8310 return const0_rtx;
8312 case BUILT_IN_ACC_ON_DEVICE:
8313 /* Do a library call if we failed to expand the builtin when
8314 folding. */
8315 break;
8317 case BUILT_IN_GOACC_PARLEVEL_ID:
8318 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8319 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8321 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8322 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8324 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8325 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8326 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8327 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8328 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8329 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8330 return expand_speculation_safe_value (mode, exp, target, ignore);
8332 default: /* Just do a library call if it is an unknown builtin. */
8333 break;
8336 /* The switch statement above can drop through to cause the function
8337 to be called normally. */
8338 return expand_call (exp, target, ignore);
8341 /* Determine whether a tree node represents a call to a built-in
8342 function. If the tree T is a call to a built-in function with
8343 the right number of arguments of the appropriate types, return
8344 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8345 Otherwise the return value is END_BUILTINS. */
8347 enum built_in_function
8348 builtin_mathfn_code (const_tree t)
8350 const_tree fndecl, arg, parmlist;
8351 const_tree argtype, parmtype;
8352 const_call_expr_arg_iterator iter;
8354 if (TREE_CODE (t) != CALL_EXPR)
8355 return END_BUILTINS;
8357 fndecl = get_callee_fndecl (t);
8358 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8359 return END_BUILTINS;
8361 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8362 init_const_call_expr_arg_iterator (t, &iter);
8363 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8365 /* If a function doesn't take a variable number of arguments,
8366 the last element in the list will have type `void'. */
8367 parmtype = TREE_VALUE (parmlist);
8368 if (VOID_TYPE_P (parmtype))
8370 if (more_const_call_expr_args_p (&iter))
8371 return END_BUILTINS;
8372 return DECL_FUNCTION_CODE (fndecl);
8375 if (! more_const_call_expr_args_p (&iter))
8376 return END_BUILTINS;
8378 arg = next_const_call_expr_arg (&iter);
8379 argtype = TREE_TYPE (arg);
8381 if (SCALAR_FLOAT_TYPE_P (parmtype))
8383 if (! SCALAR_FLOAT_TYPE_P (argtype))
8384 return END_BUILTINS;
8386 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8388 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8389 return END_BUILTINS;
8391 else if (POINTER_TYPE_P (parmtype))
8393 if (! POINTER_TYPE_P (argtype))
8394 return END_BUILTINS;
8396 else if (INTEGRAL_TYPE_P (parmtype))
8398 if (! INTEGRAL_TYPE_P (argtype))
8399 return END_BUILTINS;
8401 else
8402 return END_BUILTINS;
8405 /* Variable-length argument list. */
8406 return DECL_FUNCTION_CODE (fndecl);
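/* Illustrative example (an editorial sketch, not part of the original file):
   a hypothetical caller can use builtin_mathfn_code to recognize a
   well-typed math call:

     static bool
     is_sqrt_call (const_tree t)
     {
       return builtin_mathfn_code (t) == BUILT_IN_SQRT;
     }

   Thanks to the argument/parameter walk above, this stays false for a
   miscast call (say, sqrt applied to a pointer) even though the callee
   decl is still BUILT_IN_SQRT.  */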
8409 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8410 evaluate to a constant. */
8412 static tree
8413 fold_builtin_constant_p (tree arg)
8415 /* We return 1 for a numeric type that's known to be a constant
8416 value at compile-time or for an aggregate type that's a
8417 literal constant. */
8418 STRIP_NOPS (arg);
8420 /* If we know this is a constant, return constant one. */
8421 if (CONSTANT_CLASS_P (arg)
8422 || (TREE_CODE (arg) == CONSTRUCTOR
8423 && TREE_CONSTANT (arg)))
8424 return integer_one_node;
8425 if (TREE_CODE (arg) == ADDR_EXPR)
8427 tree op = TREE_OPERAND (arg, 0);
8428 if (TREE_CODE (op) == STRING_CST
8429 || (TREE_CODE (op) == ARRAY_REF
8430 && integer_zerop (TREE_OPERAND (op, 1))
8431 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8432 return integer_one_node;
8435 /* If this expression has side effects, show we don't know it to be a
8436 constant. Likewise if it's a pointer or aggregate type since in
8437 those cases we only want literals, since those are only optimized
8438 when generating RTL, not later.
8439 And finally, if we are compiling an initializer, not code, we
8440 need to return a definite result now; there's not going to be any
8441 more optimization done. */
8442 if (TREE_SIDE_EFFECTS (arg)
8443 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8444 || POINTER_TYPE_P (TREE_TYPE (arg))
8445 || cfun == 0
8446 || folding_initializer
8447 || force_folding_builtin_constant_p)
8448 return integer_zero_node;
8450 return NULL_TREE;
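/* Illustrative example (an editorial sketch, not part of the original file):
   the cases above fold as follows:

     __builtin_constant_p (42)      // 1: CONSTANT_CLASS_P
     __builtin_constant_p ("hi")    // 1: ADDR_EXPR of a STRING_CST
     __builtin_constant_p (x + 1)   // NULL_TREE: deferred, later passes
				    //   may still prove it constant
     __builtin_constant_p (ptr)     // 0: pointer type, only literals count

   where x is a local int and ptr a pointer variable.  */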
8453 /* Create builtin_expect or builtin_expect_with_probability
8454 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8455 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8456 argument; builtin_expect_with_probability instead uses the third argument
8457 as the PROBABILITY value. */
8459 static tree
8460 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8461 tree predictor, tree probability)
8463 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8465 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8466 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8467 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8468 ret_type = TREE_TYPE (TREE_TYPE (fn));
8469 pred_type = TREE_VALUE (arg_types);
8470 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8472 pred = fold_convert_loc (loc, pred_type, pred);
8473 expected = fold_convert_loc (loc, expected_type, expected);
8475 if (probability)
8476 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8477 else
8478 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8479 predictor);
8481 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8482 build_int_cst (ret_type, 0));
8485 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8486 NULL_TREE if no simplification is possible. */
8488 tree
8489 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8490 tree arg3)
8492 tree inner, fndecl, inner_arg0;
8493 enum tree_code code;
8495 /* Distribute the expected value over short-circuiting operators.
8496 See through the cast from truthvalue_type_node to long. */
8497 inner_arg0 = arg0;
8498 while (CONVERT_EXPR_P (inner_arg0)
8499 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8500 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8501 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8503 /* If this is a builtin_expect within a builtin_expect, keep the
8504 inner one. See through a comparison against a constant. It
8505 might have been added to create a truthvalue. */
8506 inner = inner_arg0;
8508 if (COMPARISON_CLASS_P (inner)
8509 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8510 inner = TREE_OPERAND (inner, 0);
8512 if (TREE_CODE (inner) == CALL_EXPR
8513 && (fndecl = get_callee_fndecl (inner))
8514 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8515 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8516 return arg0;
8518 inner = inner_arg0;
8519 code = TREE_CODE (inner);
8520 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8522 tree op0 = TREE_OPERAND (inner, 0);
8523 tree op1 = TREE_OPERAND (inner, 1);
8524 arg1 = save_expr (arg1);
8526 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8527 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8528 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8530 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8533 /* If the argument isn't invariant then there's nothing else we can do. */
8534 if (!TREE_CONSTANT (inner_arg0))
8535 return NULL_TREE;
8537 /* If we expect that a comparison against the argument will fold to
8538 a constant, return the constant. In practice, this means a true
8539 constant or the address of a non-weak symbol. */
8540 inner = inner_arg0;
8541 STRIP_NOPS (inner);
8542 if (TREE_CODE (inner) == ADDR_EXPR)
8544 do
8546 inner = TREE_OPERAND (inner, 0);
8548 while (TREE_CODE (inner) == COMPONENT_REF
8549 || TREE_CODE (inner) == ARRAY_REF);
8550 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8551 return NULL_TREE;
8554 /* Otherwise, ARG0 already has the proper type for the return value. */
8555 return arg0;
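/* Illustrative example (an editorial sketch, not part of the original file):
   the short-circuit distribution above rewrites

     __builtin_expect (a && b, 1)

   into the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each sub-condition carries the prediction; and the invariant path
   folds __builtin_expect (&global_fn, e) straight to its first argument,
   since the address of a non-weak symbol is a known constant.  */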
8558 /* Fold a call to __builtin_classify_type with argument ARG. */
8560 static tree
8561 fold_builtin_classify_type (tree arg)
8563 if (arg == 0)
8564 return build_int_cst (integer_type_node, no_type_class);
8566 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8569 /* Fold a call to __builtin_strlen with argument ARG. */
8571 static tree
8572 fold_builtin_strlen (location_t loc, tree type, tree arg)
8574 if (!validate_arg (arg, POINTER_TYPE))
8575 return NULL_TREE;
8576 else
8578 c_strlen_data lendata = { };
8579 tree len = c_strlen (arg, 0, &lendata);
8581 if (len)
8582 return fold_convert_loc (loc, type, len);
8584 if (!lendata.decl)
8585 c_strlen (arg, 1, &lendata);
8587 if (lendata.decl)
8589 if (EXPR_HAS_LOCATION (arg))
8590 loc = EXPR_LOCATION (arg);
8591 else if (loc == UNKNOWN_LOCATION)
8592 loc = input_location;
8593 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8596 return NULL_TREE;
8600 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8602 static tree
8603 fold_builtin_inf (location_t loc, tree type, int warn)
8605 REAL_VALUE_TYPE real;
8607 /* __builtin_inff is intended to be usable to define INFINITY on all
8608 targets. If an infinity is not available, INFINITY expands "to a
8609 positive constant of type float that overflows at translation
8610 time", footnote "In this case, using INFINITY will violate the
8611 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8612 Thus we pedwarn to ensure this constraint violation is
8613 diagnosed. */
8614 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8615 pedwarn (loc, 0, "target format does not support infinity");
8617 real_inf (&real);
8618 return build_real (type, real);
8621 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8622 NULL_TREE if no simplification can be made. */
8624 static tree
8625 fold_builtin_sincos (location_t loc,
8626 tree arg0, tree arg1, tree arg2)
8628 tree type;
8629 tree fndecl, call = NULL_TREE;
8631 if (!validate_arg (arg0, REAL_TYPE)
8632 || !validate_arg (arg1, POINTER_TYPE)
8633 || !validate_arg (arg2, POINTER_TYPE))
8634 return NULL_TREE;
8636 type = TREE_TYPE (arg0);
8638 /* Canonicalize sincos to cexpi: find the cexpi variant for TYPE. */
8639 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8640 if (fn == END_BUILTINS)
8641 return NULL_TREE;
8643 /* Calculate the result when the argument is a constant. */
8644 if (TREE_CODE (arg0) == REAL_CST)
8646 tree complex_type = build_complex_type (type);
8647 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8649 if (!call)
8651 if (!targetm.libc_has_function (function_c99_math_complex)
8652 || !builtin_decl_implicit_p (fn))
8653 return NULL_TREE;
8654 fndecl = builtin_decl_explicit (fn);
8655 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8656 call = builtin_save_expr (call);
8659 tree ptype = build_pointer_type (type);
8660 arg1 = fold_convert (ptype, arg1);
8661 arg2 = fold_convert (ptype, arg2);
8662 return build2 (COMPOUND_EXPR, void_type_node,
8663 build2 (MODIFY_EXPR, void_type_node,
8664 build_fold_indirect_ref_loc (loc, arg1),
8665 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8666 build2 (MODIFY_EXPR, void_type_node,
8667 build_fold_indirect_ref_loc (loc, arg2),
8668 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8671 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8672 Return NULL_TREE if no simplification can be made. */
8674 static tree
8675 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8677 if (!validate_arg (arg1, POINTER_TYPE)
8678 || !validate_arg (arg2, POINTER_TYPE)
8679 || !validate_arg (len, INTEGER_TYPE))
8680 return NULL_TREE;
8682 /* If the LEN parameter is zero, return zero. */
8683 if (integer_zerop (len))
8684 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8685 arg1, arg2);
8687 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8688 if (operand_equal_p (arg1, arg2, 0))
8689 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8691 /* If the len parameter is one, return an expression corresponding to
8692 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8693 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8695 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8696 tree cst_uchar_ptr_node
8697 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8699 tree ind1
8700 = fold_convert_loc (loc, integer_type_node,
8701 build1 (INDIRECT_REF, cst_uchar_node,
8702 fold_convert_loc (loc,
8703 cst_uchar_ptr_node,
8704 arg1)));
8705 tree ind2
8706 = fold_convert_loc (loc, integer_type_node,
8707 build1 (INDIRECT_REF, cst_uchar_node,
8708 fold_convert_loc (loc,
8709 cst_uchar_ptr_node,
8710 arg2)));
8711 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8714 return NULL_TREE;
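/* Illustrative example (an editorial sketch, not part of the original file):
   in source terms, the three folds above are:

     memcmp (p, q, 0)  // 0; p and q still evaluated for side effects
     memcmp (p, p, n)  // 0; n still evaluated
     memcmp (p, q, 1)  // *(const unsigned char *) p
		       //   - *(const unsigned char *) q
*/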
8717 /* Fold a call to builtin isascii with argument ARG. */
8719 static tree
8720 fold_builtin_isascii (location_t loc, tree arg)
8722 if (!validate_arg (arg, INTEGER_TYPE))
8723 return NULL_TREE;
8724 else
8726 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8727 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8728 build_int_cst (integer_type_node,
8729 ~ (unsigned HOST_WIDE_INT) 0x7f));
8730 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8731 arg, integer_zero_node);
8735 /* Fold a call to builtin toascii with argument ARG. */
8737 static tree
8738 fold_builtin_toascii (location_t loc, tree arg)
8740 if (!validate_arg (arg, INTEGER_TYPE))
8741 return NULL_TREE;
8743 /* Transform toascii(c) -> (c & 0x7f). */
8744 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8745 build_int_cst (integer_type_node, 0x7f));
8748 /* Fold a call to builtin isdigit with argument ARG. */
8750 static tree
8751 fold_builtin_isdigit (location_t loc, tree arg)
8753 if (!validate_arg (arg, INTEGER_TYPE))
8754 return NULL_TREE;
8755 else
8757 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8758 /* According to the C standard, isdigit is unaffected by locale.
8759 However, it definitely is affected by the target character set. */
8760 unsigned HOST_WIDE_INT target_digit0
8761 = lang_hooks.to_target_charset ('0');
8763 if (target_digit0 == 0)
8764 return NULL_TREE;
8766 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8767 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8768 build_int_cst (unsigned_type_node, target_digit0));
8769 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8770 build_int_cst (unsigned_type_node, 9));
8774 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8776 static tree
8777 fold_builtin_fabs (location_t loc, tree arg, tree type)
8779 if (!validate_arg (arg, REAL_TYPE))
8780 return NULL_TREE;
8782 arg = fold_convert_loc (loc, type, arg);
8783 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8786 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8788 static tree
8789 fold_builtin_abs (location_t loc, tree arg, tree type)
8791 if (!validate_arg (arg, INTEGER_TYPE))
8792 return NULL_TREE;
8794 arg = fold_convert_loc (loc, type, arg);
8795 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8798 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8800 static tree
8801 fold_builtin_carg (location_t loc, tree arg, tree type)
8803 if (validate_arg (arg, COMPLEX_TYPE)
8804 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8806 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8808 if (atan2_fn)
8810 tree new_arg = builtin_save_expr (arg);
8811 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8812 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8813 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8817 return NULL_TREE;
8820 /* Fold a call to builtin frexp; we can assume the base is 2. */
8822 static tree
8823 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8825 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8826 return NULL_TREE;
8828 STRIP_NOPS (arg0);
8830 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8831 return NULL_TREE;
8833 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8835 /* Proceed if a valid pointer type was passed in. */
8836 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8838 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8839 tree frac, exp;
8841 switch (value->cl)
8843 case rvc_zero:
8844 /* For +-0, return (*exp = 0, +-0). */
8845 exp = integer_zero_node;
8846 frac = arg0;
8847 break;
8848 case rvc_nan:
8849 case rvc_inf:
8850 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8851 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8852 case rvc_normal:
8854 /* Since the frexp function always expects base 2, and in
8855 GCC normalized significands are already in the range
8856 [0.5, 1.0), we have exactly what frexp wants. */
8857 REAL_VALUE_TYPE frac_rvt = *value;
8858 SET_REAL_EXP (&frac_rvt, 0);
8859 frac = build_real (rettype, frac_rvt);
8860 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8862 break;
8863 default:
8864 gcc_unreachable ();
8867 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8868 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8869 TREE_SIDE_EFFECTS (arg1) = 1;
8870 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8873 return NULL_TREE;
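/* Example of the constant fold above: since 8.0 == 0.5 * 2^4,
   frexp (8.0, &e) folds to the COMPOUND_EXPR (*e = 4, 0.5).  For
   NaN or Inf the argument is returned unchanged and *e is left
   unspecified, as C99 7.12.6.4 permits.  */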
8876 /* Fold a call to builtin modf. */
8878 static tree
8879 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8881 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8882 return NULL_TREE;
8884 STRIP_NOPS (arg0);
8886 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8887 return NULL_TREE;
8889 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8891 /* Proceed if a valid pointer type was passed in. */
8892 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8894 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8895 REAL_VALUE_TYPE trunc, frac;
8897 switch (value->cl)
8899 case rvc_nan:
8900 case rvc_zero:
8901 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8902 trunc = frac = *value;
8903 break;
8904 case rvc_inf:
8905 /* For +-Inf, return (*arg1 = arg0, +-0). */
8906 frac = dconst0;
8907 frac.sign = value->sign;
8908 trunc = *value;
8909 break;
8910 case rvc_normal:
8911 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8912 real_trunc (&trunc, VOIDmode, value);
8913 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8914 /* If the original number was negative and already
8915 integral, then the fractional part is -0.0. */
8916 if (value->sign && frac.cl == rvc_zero)
8917 frac.sign = value->sign;
8918 break;
8921 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8922 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8923 build_real (rettype, trunc));
8924 TREE_SIDE_EFFECTS (arg1) = 1;
8925 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8926 build_real (rettype, frac));
8929 return NULL_TREE;
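/* Example of the constant fold above: modf (-3.5, &ip) becomes
   (*ip = -3.0, -0.5), and modf (-2.0, &ip) becomes (*ip = -2.0, -0.0),
   the sign of the zero fractional part being preserved as arranged
   above.  */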
8932 /* Given a location LOC, an interclass builtin function decl FNDECL
8933 and its single argument ARG, return a folded expression computing
8934 the same, or NULL_TREE if we either couldn't or didn't want to fold
8935 (the latter happens if there's an RTL instruction available). */
8937 static tree
8938 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8940 machine_mode mode;
8942 if (!validate_arg (arg, REAL_TYPE))
8943 return NULL_TREE;
8945 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8946 return NULL_TREE;
8948 mode = TYPE_MODE (TREE_TYPE (arg));
8950 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8952 /* If there is no optab, try generic code. */
8953 switch (DECL_FUNCTION_CODE (fndecl))
8955 tree result;
8957 CASE_FLT_FN (BUILT_IN_ISINF):
8959 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8960 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8961 tree type = TREE_TYPE (arg);
8962 REAL_VALUE_TYPE r;
8963 char buf[128];
8965 if (is_ibm_extended)
8967 /* NaN and Inf are encoded in the high-order double value
8968 only. The low-order value is not significant. */
8969 type = double_type_node;
8970 mode = DFmode;
8971 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8973 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8974 real_from_string (&r, buf);
8975 result = build_call_expr (isgr_fn, 2,
8976 fold_build1_loc (loc, ABS_EXPR, type, arg),
8977 build_real (type, r));
8978 return result;
8980 CASE_FLT_FN (BUILT_IN_FINITE):
8981 case BUILT_IN_ISFINITE:
8983 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8984 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8985 tree type = TREE_TYPE (arg);
8986 REAL_VALUE_TYPE r;
8987 char buf[128];
8989 if (is_ibm_extended)
8991 /* NaN and Inf are encoded in the high-order double value
8992 only. The low-order value is not significant. */
8993 type = double_type_node;
8994 mode = DFmode;
8995 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8997 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8998 real_from_string (&r, buf);
8999 result = build_call_expr (isle_fn, 2,
9000 fold_build1_loc (loc, ABS_EXPR, type, arg),
9001 build_real (type, r));
9002 /*result = fold_build2_loc (loc, UNGT_EXPR,
9003 TREE_TYPE (TREE_TYPE (fndecl)),
9004 fold_build1_loc (loc, ABS_EXPR, type, arg),
9005 build_real (type, r));
9006 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9007 TREE_TYPE (TREE_TYPE (fndecl)),
9008 result);*/
9009 return result;
9011 case BUILT_IN_ISNORMAL:
9013 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9014 islessequal(fabs(x),DBL_MAX). */
9015 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9016 tree type = TREE_TYPE (arg);
9017 tree orig_arg, max_exp, min_exp;
9018 machine_mode orig_mode = mode;
9019 REAL_VALUE_TYPE rmax, rmin;
9020 char buf[128];
9022 orig_arg = arg = builtin_save_expr (arg);
9023 if (is_ibm_extended)
9025 /* Use double to test the normal range of IBM extended
9026 precision. Emin for IBM extended precision is
9027 different to emin for IEEE double, being 53 higher
9028 since the low double exponent is at least 53 lower
9029 than the high double exponent. */
9030 type = double_type_node;
9031 mode = DFmode;
9032 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9034 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9036 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9037 real_from_string (&rmax, buf);
9038 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9039 real_from_string (&rmin, buf);
9040 max_exp = build_real (type, rmax);
9041 min_exp = build_real (type, rmin);
9043 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9044 if (is_ibm_extended)
9046 /* Testing the high end of the range is done just using
9047 the high double, using the same test as isfinite().
9048 For the subnormal end of the range we first test the
9049 high double, then if its magnitude is equal to the
9050 limit of 0x1p-969, we test whether the low double is
9051 non-zero and opposite sign to the high double. */
9052 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9053 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9054 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9055 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9056 arg, min_exp);
9057 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9058 complex_double_type_node, orig_arg);
9059 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9060 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9061 tree zero = build_real (type, dconst0);
9062 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9063 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9064 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9065 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9066 fold_build3 (COND_EXPR,
9067 integer_type_node,
9068 hilt, logt, lolt));
9069 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9070 eq_min, ok_lo);
9071 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9072 gt_min, eq_min);
9074 else
9076 tree const isge_fn
9077 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9078 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9080 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9081 max_exp, min_exp);
9082 return result;
9084 default:
9085 break;
9088 return NULL_TREE;
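/* As a concrete instance of the generic expansion above, on a target
   whose double is IEEE binary64 (DBL_MAX == 0x1.fffffffffffffp+1023)

     __builtin_isfinite (x)  =>  islessequal (fabs (x), DBL_MAX)

   which is false both for +-Inf and for NaN (the comparison is then
   unordered), matching isfinite semantics without raising
   FE_INVALID.  */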
9091 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9092 ARG is the argument for the call. */
9094 static tree
9095 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9097 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9099 if (!validate_arg (arg, REAL_TYPE))
9100 return NULL_TREE;
9102 switch (builtin_index)
9104 case BUILT_IN_ISINF:
9105 if (!HONOR_INFINITIES (arg))
9106 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9108 return NULL_TREE;
9110 case BUILT_IN_ISINF_SIGN:
9112 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9113 /* In a boolean context, GCC will fold the inner COND_EXPR to
9114 1. So e.g. "if (isinf_sign(x))" would be folded to just
9115 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9116 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9117 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9118 tree tmp = NULL_TREE;
9120 arg = builtin_save_expr (arg);
9122 if (signbit_fn && isinf_fn)
9124 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9125 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9127 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9128 signbit_call, integer_zero_node);
9129 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9130 isinf_call, integer_zero_node);
9132 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9133 integer_minus_one_node, integer_one_node);
9134 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9135 isinf_call, tmp,
9136 integer_zero_node);
9139 return tmp;
9142 case BUILT_IN_ISFINITE:
9143 if (!HONOR_NANS (arg)
9144 && !HONOR_INFINITIES (arg))
9145 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9147 return NULL_TREE;
9149 case BUILT_IN_ISNAN:
9150 if (!HONOR_NANS (arg))
9151 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9154 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9155 if (is_ibm_extended)
9157 /* NaN and Inf are encoded in the high-order double value
9158 only. The low-order value is not significant. */
9159 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9162 arg = builtin_save_expr (arg);
9163 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9165 default:
9166 gcc_unreachable ();
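/* For example, __builtin_isinf_sign (x) expands as described above to

     isinf (x) ? (signbit (x) ? -1 : 1) : 0

   and __builtin_isnan (x), when it cannot be resolved statically,
   becomes the self-comparison UNORDERED_EXPR (x, x), which is true
   only when x is a NaN.  */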
9170 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9171 This builtin will generate code to return the appropriate floating
9172 point classification depending on the value of the floating point
9173 number passed in. The possible return values must be supplied as
9174 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9175 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9176 one floating point argument which is "type generic". */
9178 static tree
9179 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9181 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9182 arg, type, res, tmp;
9183 machine_mode mode;
9184 REAL_VALUE_TYPE r;
9185 char buf[128];
9187 /* Verify the required arguments in the original call. */
9188 if (nargs != 6
9189 || !validate_arg (args[0], INTEGER_TYPE)
9190 || !validate_arg (args[1], INTEGER_TYPE)
9191 || !validate_arg (args[2], INTEGER_TYPE)
9192 || !validate_arg (args[3], INTEGER_TYPE)
9193 || !validate_arg (args[4], INTEGER_TYPE)
9194 || !validate_arg (args[5], REAL_TYPE))
9195 return NULL_TREE;
9197 fp_nan = args[0];
9198 fp_infinite = args[1];
9199 fp_normal = args[2];
9200 fp_subnormal = args[3];
9201 fp_zero = args[4];
9202 arg = args[5];
9203 type = TREE_TYPE (arg);
9204 mode = TYPE_MODE (type);
9205 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9207 /* fpclassify(x) ->
9208 isnan(x) ? FP_NAN :
9209 (fabs(x) == Inf ? FP_INFINITE :
9210 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9211 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9213 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9214 build_real (type, dconst0));
9215 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9216 tmp, fp_zero, fp_subnormal);
9218 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9219 real_from_string (&r, buf);
9220 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9221 arg, build_real (type, r));
9222 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9224 if (HONOR_INFINITIES (mode))
9226 real_inf (&r);
9227 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9228 build_real (type, r));
9229 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9230 fp_infinite, res);
9233 if (HONOR_NANS (mode))
9235 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9236 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9239 return res;
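/* Read inside out, the chain built above is roughly equivalent, for a
   binary64 double argument x with t = fabs (x), to

     !isnan (x) ? (t == Inf ? FP_INFINITE
                   : t >= 0x1p-1022 ? FP_NORMAL
                   : t == 0 ? FP_ZERO : FP_SUBNORMAL)
                : FP_NAN

   where 0x1p-1022 is DBL_MIN, i.e. 2^(emin - 1).  */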
9242 /* Fold a call to an unordered comparison function such as
9243 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9244 being called and ARG0 and ARG1 are the arguments for the call.
9245 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9246 the opposite of the desired result. UNORDERED_CODE is used
9247 for modes that can hold NaNs and ORDERED_CODE is used for
9248 the rest. */
9250 static tree
9251 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9252 enum tree_code unordered_code,
9253 enum tree_code ordered_code)
9255 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9256 enum tree_code code;
9257 tree type0, type1;
9258 enum tree_code code0, code1;
9259 tree cmp_type = NULL_TREE;
9261 type0 = TREE_TYPE (arg0);
9262 type1 = TREE_TYPE (arg1);
9264 code0 = TREE_CODE (type0);
9265 code1 = TREE_CODE (type1);
9267 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9268 /* Choose the wider of two real types. */
9269 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9270 ? type0 : type1;
9271 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9272 cmp_type = type0;
9273 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9274 cmp_type = type1;
9276 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9277 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9279 if (unordered_code == UNORDERED_EXPR)
9281 if (!HONOR_NANS (arg0))
9282 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9283 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9286 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9287 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9288 fold_build2_loc (loc, code, type, arg0, arg1));
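/* For example, __builtin_isgreater (x, y) folds to the negation of
   UNLE_EXPR (x, y) when NaNs may occur: UNLE is true if the operands
   are unordered or x <= y, so its negation is exactly the quiet
   "greater" test.  When NaNs cannot occur, a plain LE_EXPR is negated
   instead.  */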
9291 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9292 arithmetic if it can never overflow, or into internal functions that
9293 return both the result of the arithmetic and an overflow flag in
9294 a complex integer result, or some other check for overflow.
9295 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9296 checking part of that. */
9298 static tree
9299 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9300 tree arg0, tree arg1, tree arg2)
9302 enum internal_fn ifn = IFN_LAST;
9303 /* The code of the expression corresponding to the built-in. */
9304 enum tree_code opcode = ERROR_MARK;
9305 bool ovf_only = false;
9307 switch (fcode)
9309 case BUILT_IN_ADD_OVERFLOW_P:
9310 ovf_only = true;
9311 /* FALLTHRU */
9312 case BUILT_IN_ADD_OVERFLOW:
9313 case BUILT_IN_SADD_OVERFLOW:
9314 case BUILT_IN_SADDL_OVERFLOW:
9315 case BUILT_IN_SADDLL_OVERFLOW:
9316 case BUILT_IN_UADD_OVERFLOW:
9317 case BUILT_IN_UADDL_OVERFLOW:
9318 case BUILT_IN_UADDLL_OVERFLOW:
9319 opcode = PLUS_EXPR;
9320 ifn = IFN_ADD_OVERFLOW;
9321 break;
9322 case BUILT_IN_SUB_OVERFLOW_P:
9323 ovf_only = true;
9324 /* FALLTHRU */
9325 case BUILT_IN_SUB_OVERFLOW:
9326 case BUILT_IN_SSUB_OVERFLOW:
9327 case BUILT_IN_SSUBL_OVERFLOW:
9328 case BUILT_IN_SSUBLL_OVERFLOW:
9329 case BUILT_IN_USUB_OVERFLOW:
9330 case BUILT_IN_USUBL_OVERFLOW:
9331 case BUILT_IN_USUBLL_OVERFLOW:
9332 opcode = MINUS_EXPR;
9333 ifn = IFN_SUB_OVERFLOW;
9334 break;
9335 case BUILT_IN_MUL_OVERFLOW_P:
9336 ovf_only = true;
9337 /* FALLTHRU */
9338 case BUILT_IN_MUL_OVERFLOW:
9339 case BUILT_IN_SMUL_OVERFLOW:
9340 case BUILT_IN_SMULL_OVERFLOW:
9341 case BUILT_IN_SMULLL_OVERFLOW:
9342 case BUILT_IN_UMUL_OVERFLOW:
9343 case BUILT_IN_UMULL_OVERFLOW:
9344 case BUILT_IN_UMULLL_OVERFLOW:
9345 opcode = MULT_EXPR;
9346 ifn = IFN_MUL_OVERFLOW;
9347 break;
9348 default:
9349 gcc_unreachable ();
9352 /* For the "generic" overloads, the first two arguments can have different
9353 types and the last argument determines the target type to use to check
9354 for overflow. The arguments of the other overloads all have the same
9355 type. */
9356 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9358 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9359 arguments are constant, attempt to fold the built-in call into a constant
9360 expression indicating whether or not it detected an overflow. */
9361 if (ovf_only
9362 && TREE_CODE (arg0) == INTEGER_CST
9363 && TREE_CODE (arg1) == INTEGER_CST)
9364 /* Perform the computation in the target type and check for overflow. */
9365 return omit_one_operand_loc (loc, boolean_type_node,
9366 arith_overflowed_p (opcode, type, arg0, arg1)
9367 ? boolean_true_node : boolean_false_node,
9368 arg2);
9370 tree intres, ovfres;
9371 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9373 intres = fold_binary_loc (loc, opcode, type,
9374 fold_convert_loc (loc, type, arg0),
9375 fold_convert_loc (loc, type, arg1));
9376 if (TREE_OVERFLOW (intres))
9377 intres = drop_tree_overflow (intres);
9378 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9379 ? boolean_true_node : boolean_false_node);
9381 else
9383 tree ctype = build_complex_type (type);
9384 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9385 arg0, arg1);
9386 tree tgt = save_expr (call);
9387 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9388 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9389 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9392 if (ovf_only)
9393 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9395 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9396 tree store
9397 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9398 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
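/* Two sketches of the folds above.  With constant operands,
   __builtin_add_overflow_p (INT_MAX, 1, 0) folds directly to true.
   With variable operands, __builtin_add_overflow (a, b, &r) becomes
   roughly

     c = IFN_ADD_OVERFLOW (a, b); *r = REALPART_EXPR (c);
     (bool) IMAGPART_EXPR (c)

   where c is a complex integer carrying both the result and the
   overflow flag.  */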
9401 /* Fold a call to __builtin_FILE to a constant string. */
9403 static inline tree
9404 fold_builtin_FILE (location_t loc)
9406 if (const char *fname = LOCATION_FILE (loc))
9408 /* The documentation says this builtin is equivalent to the preprocessor
9409 __FILE__ macro so it appears appropriate to use the same file prefix
9410 mappings. */
9411 fname = remap_macro_filename (fname);
9412 return build_string_literal (strlen (fname) + 1, fname);
9415 return build_string_literal (1, "");
9418 /* Fold a call to __builtin_FUNCTION to a constant string. */
9420 static inline tree
9421 fold_builtin_FUNCTION ()
9423 const char *name = "";
9425 if (current_function_decl)
9426 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9428 return build_string_literal (strlen (name) + 1, name);
9431 /* Fold a call to __builtin_LINE to an integer constant. */
9433 static inline tree
9434 fold_builtin_LINE (location_t loc, tree type)
9436 return build_int_cst (type, LOCATION_LINE (loc));
9439 /* Fold a call to built-in function FNDECL with 0 arguments.
9440 This function returns NULL_TREE if no simplification was possible. */
9442 static tree
9443 fold_builtin_0 (location_t loc, tree fndecl)
9445 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9446 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9447 switch (fcode)
9449 case BUILT_IN_FILE:
9450 return fold_builtin_FILE (loc);
9452 case BUILT_IN_FUNCTION:
9453 return fold_builtin_FUNCTION ();
9455 case BUILT_IN_LINE:
9456 return fold_builtin_LINE (loc, type);
9458 CASE_FLT_FN (BUILT_IN_INF):
9459 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9460 case BUILT_IN_INFD32:
9461 case BUILT_IN_INFD64:
9462 case BUILT_IN_INFD128:
9463 return fold_builtin_inf (loc, type, true);
9465 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9466 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9467 return fold_builtin_inf (loc, type, false);
9469 case BUILT_IN_CLASSIFY_TYPE:
9470 return fold_builtin_classify_type (NULL_TREE);
9472 default:
9473 break;
9475 return NULL_TREE;
9478 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9479 This function returns NULL_TREE if no simplification was possible. */
9481 static tree
9482 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9484 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9485 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9487 if (TREE_CODE (arg0) == ERROR_MARK)
9488 return NULL_TREE;
9490 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9491 return ret;
9493 switch (fcode)
9495 case BUILT_IN_CONSTANT_P:
9497 tree val = fold_builtin_constant_p (arg0);
9499 /* Gimplification will pull the CALL_EXPR for the builtin out of
9500 an if condition. When not optimizing, we'll not CSE it back.
9501 To avoid link-error regressions, return false now. */
9502 if (!val && !optimize)
9503 val = integer_zero_node;
9505 return val;
9508 case BUILT_IN_CLASSIFY_TYPE:
9509 return fold_builtin_classify_type (arg0);
9511 case BUILT_IN_STRLEN:
9512 return fold_builtin_strlen (loc, type, arg0);
9514 CASE_FLT_FN (BUILT_IN_FABS):
9515 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9516 case BUILT_IN_FABSD32:
9517 case BUILT_IN_FABSD64:
9518 case BUILT_IN_FABSD128:
9519 return fold_builtin_fabs (loc, arg0, type);
9521 case BUILT_IN_ABS:
9522 case BUILT_IN_LABS:
9523 case BUILT_IN_LLABS:
9524 case BUILT_IN_IMAXABS:
9525 return fold_builtin_abs (loc, arg0, type);
9527 CASE_FLT_FN (BUILT_IN_CONJ):
9528 if (validate_arg (arg0, COMPLEX_TYPE)
9529 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9530 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9531 break;
9533 CASE_FLT_FN (BUILT_IN_CREAL):
9534 if (validate_arg (arg0, COMPLEX_TYPE)
9535 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9536 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9537 break;
9539 CASE_FLT_FN (BUILT_IN_CIMAG):
9540 if (validate_arg (arg0, COMPLEX_TYPE)
9541 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9542 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9543 break;
9545 CASE_FLT_FN (BUILT_IN_CARG):
9546 return fold_builtin_carg (loc, arg0, type);
9548 case BUILT_IN_ISASCII:
9549 return fold_builtin_isascii (loc, arg0);
9551 case BUILT_IN_TOASCII:
9552 return fold_builtin_toascii (loc, arg0);
9554 case BUILT_IN_ISDIGIT:
9555 return fold_builtin_isdigit (loc, arg0);
9557 CASE_FLT_FN (BUILT_IN_FINITE):
9558 case BUILT_IN_FINITED32:
9559 case BUILT_IN_FINITED64:
9560 case BUILT_IN_FINITED128:
9561 case BUILT_IN_ISFINITE:
9563 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9564 if (ret)
9565 return ret;
9566 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9569 CASE_FLT_FN (BUILT_IN_ISINF):
9570 case BUILT_IN_ISINFD32:
9571 case BUILT_IN_ISINFD64:
9572 case BUILT_IN_ISINFD128:
9574 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9575 if (ret)
9576 return ret;
9577 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9580 case BUILT_IN_ISNORMAL:
9581 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9583 case BUILT_IN_ISINF_SIGN:
9584 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9586 CASE_FLT_FN (BUILT_IN_ISNAN):
9587 case BUILT_IN_ISNAND32:
9588 case BUILT_IN_ISNAND64:
9589 case BUILT_IN_ISNAND128:
9590 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9592 case BUILT_IN_FREE:
9593 if (integer_zerop (arg0))
9594 return build_empty_stmt (loc);
9595 break;
9597 default:
9598 break;
9601 return NULL_TREE;
9605 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9606 This function returns NULL_TREE if no simplification was possible. */
9608 static tree
9609 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9611 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9612 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9614 if (TREE_CODE (arg0) == ERROR_MARK
9615 || TREE_CODE (arg1) == ERROR_MARK)
9616 return NULL_TREE;
9618 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9619 return ret;
9621 switch (fcode)
9623 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9624 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9625 if (validate_arg (arg0, REAL_TYPE)
9626 && validate_arg (arg1, POINTER_TYPE))
9627 return do_mpfr_lgamma_r (arg0, arg1, type);
9628 break;
9630 CASE_FLT_FN (BUILT_IN_FREXP):
9631 return fold_builtin_frexp (loc, arg0, arg1, type);
9633 CASE_FLT_FN (BUILT_IN_MODF):
9634 return fold_builtin_modf (loc, arg0, arg1, type);
9636 case BUILT_IN_STRSPN:
9637 return fold_builtin_strspn (loc, arg0, arg1);
9639 case BUILT_IN_STRCSPN:
9640 return fold_builtin_strcspn (loc, arg0, arg1);
9642 case BUILT_IN_STRPBRK:
9643 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9645 case BUILT_IN_EXPECT:
9646 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9648 case BUILT_IN_ISGREATER:
9649 return fold_builtin_unordered_cmp (loc, fndecl,
9650 arg0, arg1, UNLE_EXPR, LE_EXPR);
9651 case BUILT_IN_ISGREATEREQUAL:
9652 return fold_builtin_unordered_cmp (loc, fndecl,
9653 arg0, arg1, UNLT_EXPR, LT_EXPR);
9654 case BUILT_IN_ISLESS:
9655 return fold_builtin_unordered_cmp (loc, fndecl,
9656 arg0, arg1, UNGE_EXPR, GE_EXPR);
9657 case BUILT_IN_ISLESSEQUAL:
9658 return fold_builtin_unordered_cmp (loc, fndecl,
9659 arg0, arg1, UNGT_EXPR, GT_EXPR);
9660 case BUILT_IN_ISLESSGREATER:
9661 return fold_builtin_unordered_cmp (loc, fndecl,
9662 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9663 case BUILT_IN_ISUNORDERED:
9664 return fold_builtin_unordered_cmp (loc, fndecl,
9665 arg0, arg1, UNORDERED_EXPR,
9666 NOP_EXPR);
9668 /* We do the folding for va_start in the expander. */
9669 case BUILT_IN_VA_START:
9670 break;
9672 case BUILT_IN_OBJECT_SIZE:
9673 return fold_builtin_object_size (arg0, arg1);
9675 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9676 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9678 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9679 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9681 default:
9682 break;
9684 return NULL_TREE;
9687 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9688 and ARG2.
9689 This function returns NULL_TREE if no simplification was possible. */
9691 static tree
9692 fold_builtin_3 (location_t loc, tree fndecl,
9693 tree arg0, tree arg1, tree arg2)
9695 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9696 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9698 if (TREE_CODE (arg0) == ERROR_MARK
9699 || TREE_CODE (arg1) == ERROR_MARK
9700 || TREE_CODE (arg2) == ERROR_MARK)
9701 return NULL_TREE;
9703 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9704 arg0, arg1, arg2))
9705 return ret;
9707 switch (fcode)
9710 CASE_FLT_FN (BUILT_IN_SINCOS):
9711 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9713 CASE_FLT_FN (BUILT_IN_REMQUO):
9714 if (validate_arg (arg0, REAL_TYPE)
9715 && validate_arg (arg1, REAL_TYPE)
9716 && validate_arg (arg2, POINTER_TYPE))
9717 return do_mpfr_remquo (arg0, arg1, arg2);
9718 break;
9720 case BUILT_IN_MEMCMP:
9721 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9723 case BUILT_IN_EXPECT:
9724 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9726 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9727 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9729 case BUILT_IN_ADD_OVERFLOW:
9730 case BUILT_IN_SUB_OVERFLOW:
9731 case BUILT_IN_MUL_OVERFLOW:
9732 case BUILT_IN_ADD_OVERFLOW_P:
9733 case BUILT_IN_SUB_OVERFLOW_P:
9734 case BUILT_IN_MUL_OVERFLOW_P:
9735 case BUILT_IN_SADD_OVERFLOW:
9736 case BUILT_IN_SADDL_OVERFLOW:
9737 case BUILT_IN_SADDLL_OVERFLOW:
9738 case BUILT_IN_SSUB_OVERFLOW:
9739 case BUILT_IN_SSUBL_OVERFLOW:
9740 case BUILT_IN_SSUBLL_OVERFLOW:
9741 case BUILT_IN_SMUL_OVERFLOW:
9742 case BUILT_IN_SMULL_OVERFLOW:
9743 case BUILT_IN_SMULLL_OVERFLOW:
9744 case BUILT_IN_UADD_OVERFLOW:
9745 case BUILT_IN_UADDL_OVERFLOW:
9746 case BUILT_IN_UADDLL_OVERFLOW:
9747 case BUILT_IN_USUB_OVERFLOW:
9748 case BUILT_IN_USUBL_OVERFLOW:
9749 case BUILT_IN_USUBLL_OVERFLOW:
9750 case BUILT_IN_UMUL_OVERFLOW:
9751 case BUILT_IN_UMULL_OVERFLOW:
9752 case BUILT_IN_UMULLL_OVERFLOW:
9753 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9755 default:
9756 break;
9758 return NULL_TREE;
9761 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9762 arguments. IGNORE is true if the result of the
9763 function call is ignored. This function returns NULL_TREE if no
9764 simplification was possible. */
9766 tree
9767 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9769 tree ret = NULL_TREE;
9771 switch (nargs)
9773 case 0:
9774 ret = fold_builtin_0 (loc, fndecl);
9775 break;
9776 case 1:
9777 ret = fold_builtin_1 (loc, fndecl, args[0]);
9778 break;
9779 case 2:
9780 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9781 break;
9782 case 3:
9783 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9784 break;
9785 default:
9786 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9787 break;
9789 if (ret)
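/* Wrap the folded result in a NOP_EXPR so SET_EXPR_LOCATION has a
   fresh node to attach LOC to; the result may be a shared constant
   such as integer_zero_node, which cannot carry a location itself.  */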
9791 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9792 SET_EXPR_LOCATION (ret, loc);
9793 return ret;
9795 return NULL_TREE;
9798 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9799 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9800 of arguments in ARGS to be omitted. OLDNARGS is the number of
9801 elements in ARGS. */
9803 static tree
9804 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9805 int skip, tree fndecl, int n, va_list newargs)
9807 int nargs = oldnargs - skip + n;
9808 tree *buffer;
9810 if (n > 0)
9812 int i, j;
9814 buffer = XALLOCAVEC (tree, nargs);
9815 for (i = 0; i < n; i++)
9816 buffer[i] = va_arg (newargs, tree);
9817 for (j = skip; j < oldnargs; j++, i++)
9818 buffer[i] = args[j];
9820 else
9821 buffer = args + skip;
9823 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9826 /* Return true if FNDECL shouldn't be folded right now.
9827 If a built-in function has an inline attribute always_inline
9828 wrapper, defer folding it until after always_inline functions have
9829 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9830 might not be performed. */
9832 bool
9833 avoid_folding_inline_builtin (tree fndecl)
9835 return (DECL_DECLARED_INLINE_P (fndecl)
9836 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9837 && cfun
9838 && !cfun->always_inline_functions_inlined
9839 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9842 /* A wrapper function for builtin folding that prevents warnings for
9843 "statement without effect" and the like, caused by removing the
9844 call node earlier than the warning is generated. */
9846 tree
9847 fold_call_expr (location_t loc, tree exp, bool ignore)
9849 tree ret = NULL_TREE;
9850 tree fndecl = get_callee_fndecl (exp);
9851 if (fndecl && fndecl_built_in_p (fndecl)
9852 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9853 yet. Defer folding until we see all the arguments
9854 (after inlining). */
9855 && !CALL_EXPR_VA_ARG_PACK (exp))
9857 int nargs = call_expr_nargs (exp);
9859 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9860 instead last argument is __builtin_va_arg_pack (). Defer folding
9861 even in that case, until arguments are finalized. */
9862 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9864 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9865 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9866 return NULL_TREE;
9869 if (avoid_folding_inline_builtin (fndecl))
9870 return NULL_TREE;
9872 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9873 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9874 CALL_EXPR_ARGP (exp), ignore);
9875 else
9877 tree *args = CALL_EXPR_ARGP (exp);
9878 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9879 if (ret)
9880 return ret;
9883 return NULL_TREE;
9886 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9887 N arguments are passed in the array ARGARRAY. Return a folded
9888 expression or NULL_TREE if no simplification was possible. */
9890 tree
9891 fold_builtin_call_array (location_t loc, tree,
9892 tree fn,
9893 int n,
9894 tree *argarray)
9896 if (TREE_CODE (fn) != ADDR_EXPR)
9897 return NULL_TREE;
9899 tree fndecl = TREE_OPERAND (fn, 0);
9900 if (TREE_CODE (fndecl) == FUNCTION_DECL
9901 && fndecl_built_in_p (fndecl))
9903 /* If last argument is __builtin_va_arg_pack (), arguments to this
9904 function are not finalized yet. Defer folding until they are. */
9905 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9907 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9908 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9909 return NULL_TREE;
9911 if (avoid_folding_inline_builtin (fndecl))
9912 return NULL_TREE;
9913 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9914 return targetm.fold_builtin (fndecl, n, argarray, false);
9915 else
9916 return fold_builtin_n (loc, fndecl, argarray, n, false);
9919 return NULL_TREE;
9922 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9923 along with N new arguments specified as the "..." parameters. SKIP
9924 is the number of arguments in EXP to be omitted. This function is used
9925 to do varargs-to-varargs transformations. */
9927 static tree
9928 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9930 va_list ap;
9931 tree t;
9933 va_start (ap, n);
9934 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9935 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9936 va_end (ap);
9938 return t;
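/* Purely illustrative usage (the names here are hypothetical): given
   EXP representing some_builtin (a, b, c), a transformation to
   other_fn (x, b, c) would be written

     rewrite_call_expr (loc, exp, 1, other_fndecl, 1, x);

   skipping the first original argument, prepending one new argument
   and copying the remaining arguments over unchanged.  */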
9941 /* Validate a single argument ARG against a tree code CODE representing
9942 a type. Return true when argument is valid. */
9944 static bool
9945 validate_arg (const_tree arg, enum tree_code code)
9947 if (!arg)
9948 return false;
9949 else if (code == POINTER_TYPE)
9950 return POINTER_TYPE_P (TREE_TYPE (arg));
9951 else if (code == INTEGER_TYPE)
9952 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9953 return code == TREE_CODE (TREE_TYPE (arg));
9956 /* This function validates the types of a function call argument list
9957 against a specified list of tree_codes. If the last specifier is a 0,
9958 that represents an ellipsis, otherwise the last specifier must be a
9959 VOID_TYPE.
9961 This is the GIMPLE version of validate_arglist. Eventually we want to
9962 completely convert builtins.c to work from GIMPLEs and the tree based
9963 validate_arglist will then be removed. */
9965 bool
9966 validate_gimple_arglist (const gcall *call, ...)
9968 enum tree_code code;
9969 bool res = false;
9970 va_list ap;
9971 const_tree arg;
9972 size_t i;
9974 va_start (ap, call);
9975 i = 0;
9977 do
9979 code = (enum tree_code) va_arg (ap, int);
9980 switch (code)
9982 case 0:
9983 /* This signifies an ellipsis; any further arguments are all ok. */
9984 res = true;
9985 goto end;
9986 case VOID_TYPE:
9987 /* This signifies an endlink; if no arguments remain, return
9988 true, otherwise return false. */
9989 res = (i == gimple_call_num_args (call));
9990 goto end;
9991 default:
9992 /* If no parameters remain or the parameter's code does not
9993 match the specified code, return false. Otherwise continue
9994 checking any remaining arguments. */
9995 arg = gimple_call_arg (call, i++);
9996 if (!validate_arg (arg, code))
9997 goto end;
9998 break;
10001 while (1);
10003 /* We need gotos here since we can only have one VA_CLOSE in a
10004 function. */
10005 end: ;
10006 va_end (ap);
10008 return res;
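/* Example of the specifier convention above: a remquo (double, double,
   int *) call would be checked with

     validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE,
                              POINTER_TYPE, VOID_TYPE);

   while passing 0 instead of the trailing VOID_TYPE accepts any
   further arguments, as for a varargs builtin.  */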
10011 /* Default target-specific builtin expander that does nothing. */
10013 rtx
10014 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10015 rtx target ATTRIBUTE_UNUSED,
10016 rtx subtarget ATTRIBUTE_UNUSED,
10017 machine_mode mode ATTRIBUTE_UNUSED,
10018 int ignore ATTRIBUTE_UNUSED)
10020 return NULL_RTX;
10023 /* Returns true if EXP represents data that would potentially reside
10024 in a readonly section. */
10026 bool
10027 readonly_data_expr (tree exp)
10029 STRIP_NOPS (exp);
10031 if (TREE_CODE (exp) != ADDR_EXPR)
10032 return false;
10034 exp = get_base_address (TREE_OPERAND (exp, 0));
10035 if (!exp)
10036 return false;
10038 /* Make sure we call decl_readonly_section only for trees it
10039 can handle (since it returns true for everything it doesn't
10040 understand). */
10041 if (TREE_CODE (exp) == STRING_CST
10042 || TREE_CODE (exp) == CONSTRUCTOR
10043 || (VAR_P (exp) && TREE_STATIC (exp)))
10044 return decl_readonly_section (exp, 0);
10045 else
10046 return false;
10049 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10050 to the call, and TYPE is its return type.
10052 Return NULL_TREE if no simplification was possible, otherwise return the
10053 simplified form of the call as a tree.
10055 The simplified form may be a constant or other expression which
10056 computes the same value, but in a more efficient manner (including
10057 calls to other builtin functions).
10059 The call may contain arguments which need to be evaluated, but
10060 which are not useful to determine the result of the call. In
10061 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10062 COMPOUND_EXPR will be an argument which must be evaluated.
10063 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10064 COMPOUND_EXPR in the chain will contain the tree for the simplified
10065 form of the builtin function call. */
10067 static tree
10068 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10070 if (!validate_arg (s1, POINTER_TYPE)
10071 || !validate_arg (s2, POINTER_TYPE))
10072 return NULL_TREE;
10073 else
10075 tree fn;
10076 const char *p1, *p2;
10078 p2 = c_getstr (s2);
10079 if (p2 == NULL)
10080 return NULL_TREE;
10082 p1 = c_getstr (s1);
10083 if (p1 != NULL)
10085 const char *r = strpbrk (p1, p2);
10086 tree tem;
10088 if (r == NULL)
10089 return build_int_cst (TREE_TYPE (s1), 0);
10091 /* Return an offset into the constant string argument. */
10092 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10093 return fold_convert_loc (loc, type, tem);
10096 if (p2[0] == '\0')
10097 /* strpbrk(x, "") == NULL.
10098 Evaluate and ignore s1 in case it had side-effects. */
10099 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10101 if (p2[1] != '\0')
10102 return NULL_TREE; /* Really call strpbrk. */
10104 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10105 if (!fn)
10106 return NULL_TREE;
10108 /* New argument list transforming strpbrk(s1, s2) to
10109 strchr(s1, s2[0]). */
10110 return build_call_expr_loc (loc, fn, 2, s1,
10111 build_int_cst (integer_type_node, p2[0]));
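/* Concretely: strpbrk (s, "") folds to a null pointer while still
   evaluating S for side-effects, strpbrk ("abc", "b") folds to the
   offset expression "abc" + 1, and strpbrk (s, "/") becomes
   strchr (s, '/'); anything else remains a real strpbrk call.  */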
10115 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10116 to the call.
10118 Return NULL_TREE if no simplification was possible, otherwise return the
10119 simplified form of the call as a tree.
10121 The simplified form may be a constant or other expression which
10122 computes the same value, but in a more efficient manner (including
10123 calls to other builtin functions).
10125 The call may contain arguments which need to be evaluated, but
10126 which are not useful to determine the result of the call. In
10127 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10128 COMPOUND_EXPR will be an argument which must be evaluated.
10129 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10130 COMPOUND_EXPR in the chain will contain the tree for the simplified
10131 form of the builtin function call. */
10133 static tree
10134 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10136 if (!validate_arg (s1, POINTER_TYPE)
10137 || !validate_arg (s2, POINTER_TYPE))
10138 return NULL_TREE;
10139 else
10141 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10143 /* If either argument is "", the result is 0. */
10144 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10145 /* Evaluate and ignore both arguments in case either one has
10146 side-effects. */
10147 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10148 s1, s2);
10149 return NULL_TREE;
10153 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10154 to the call.
10156 Return NULL_TREE if no simplification was possible, otherwise return the
10157 simplified form of the call as a tree.
10159 The simplified form may be a constant or other expression which
10160 computes the same value, but in a more efficient manner (including
10161 calls to other builtin functions).
10163 The call may contain arguments which need to be evaluated, but
10164 which are not useful to determine the result of the call. In
10165 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10166 COMPOUND_EXPR will be an argument which must be evaluated.
10167 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10168 COMPOUND_EXPR in the chain will contain the tree for the simplified
10169 form of the builtin function call. */
10171 static tree
10172 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10174 if (!validate_arg (s1, POINTER_TYPE)
10175 || !validate_arg (s2, POINTER_TYPE))
10176 return NULL_TREE;
10177 else
10179 /* If the first argument is "", the result is 0. */
10180 const char *p1 = c_getstr (s1);
10181 if (p1 && *p1 == '\0')
10183 /* Evaluate and ignore argument s2 in case it has
10184 side-effects. */
10185 return omit_one_operand_loc (loc, size_type_node,
10186 size_zero_node, s2);
10189 /* If the second argument is "", return __builtin_strlen(s1). */
10190 const char *p2 = c_getstr (s2);
10191 if (p2 && *p2 == '\0')
10193 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10195 /* If the replacement _DECL isn't initialized, don't do the
10196 transformation. */
10197 if (!fn)
10198 return NULL_TREE;
10200 return build_call_expr_loc (loc, fn, 1, s1);
10202 return NULL_TREE;
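/* Worked cases for the two folds above: strspn ("", accept),
   strspn (s, "") and strcspn ("", reject) are all 0, while
   strcspn (s, "") spans the whole string and is therefore rewritten
   as strlen (s).  */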
10206 /* Fold the next_arg or va_start call EXP. Returns true if an error
10207 was produced, false otherwise. This is done so that we don't output
10208 the error or warning two or three times. */
10210 bool
10211 fold_builtin_next_arg (tree exp, bool va_start_p)
10213 tree fntype = TREE_TYPE (current_function_decl);
10214 int nargs = call_expr_nargs (exp);
10215 tree arg;
10216 /* There is a good chance the current input_location points inside the
10217 definition of the va_start macro (perhaps on the token for
10218 builtin) in a system header, so warnings will not be emitted.
10219 Use the location in real source code. */
10220 location_t current_location =
10221 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10222 NULL);
10224 if (!stdarg_p (fntype))
10226 error ("%<va_start%> used in function with fixed args");
10227 return true;
10230 if (va_start_p)
10232 if (va_start_p && (nargs != 2))
10234 error ("wrong number of arguments to function %<va_start%>");
10235 return true;
10237 arg = CALL_EXPR_ARG (exp, 1);
10239 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10240 when we checked the arguments and if needed issued a warning. */
10241 else
10243 if (nargs == 0)
10245 /* Evidently an out of date version of <stdarg.h>; can't validate
10246 va_start's second argument, but can still work as intended. */
10247 warning_at (current_location,
10248 OPT_Wvarargs,
10249 "%<__builtin_next_arg%> called without an argument");
10250 return true;
10252 else if (nargs > 1)
10254 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10255 return true;
10257 arg = CALL_EXPR_ARG (exp, 0);
10260 if (TREE_CODE (arg) == SSA_NAME)
10261 arg = SSA_NAME_VAR (arg);
10263 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10264 or __builtin_next_arg (0) the first time we see it, after checking
10265 the arguments and if needed issuing a warning. */
10266 if (!integer_zerop (arg))
10268 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10270 /* Strip off all nops for the sake of the comparison. This
10271 is not quite the same as STRIP_NOPS. It does more.
10272 We must also strip off INDIRECT_EXPR for C++ reference
10273 parameters. */
10274 while (CONVERT_EXPR_P (arg)
10275 || TREE_CODE (arg) == INDIRECT_REF)
10276 arg = TREE_OPERAND (arg, 0);
10277 if (arg != last_parm)
10279 /* FIXME: Sometimes with the tree optimizers we can get something
10280 other than the last argument even though the user used the last
10281 argument. We just warn and set the arg to be the last argument
10282 so that we will get wrong code because of it. */
10284 warning_at (current_location,
10285 OPT_Wvarargs,
10286 "second parameter of %<va_start%> not last named argument");
10289 /* Undefined by C99 7.15.1.4p4 (va_start):
10290 "If the parameter parmN is declared with the register storage
10291 class, with a function or array type, or with a type that is
10292 not compatible with the type that results after application of
10293 the default argument promotions, the behavior is undefined."
10295 else if (DECL_REGISTER (arg))
10297 warning_at (current_location,
10298 OPT_Wvarargs,
10299 "undefined behavior when second parameter of "
10300 "%<va_start%> is declared with %<register%> storage");
10303 /* We want to verify the second parameter just once before the tree
10304 optimizers are run and then avoid keeping it in the tree,
10305 as otherwise we could warn even for correct code like:
10306 void foo (int i, ...)
10307 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10308 if (va_start_p)
10309 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10310 else
10311 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10313 return false;
10317 /* Expand a call EXP to __builtin_object_size. */
10319 static rtx
10320 expand_builtin_object_size (tree exp)
10322 tree ost;
10323 int object_size_type;
10324 tree fndecl = get_callee_fndecl (exp);
10326 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10328 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10329 exp, fndecl);
10330 expand_builtin_trap ();
10331 return const0_rtx;
10334 ost = CALL_EXPR_ARG (exp, 1);
10335 STRIP_NOPS (ost);
10337 if (TREE_CODE (ost) != INTEGER_CST
10338 || tree_int_cst_sgn (ost) < 0
10339 || compare_tree_int (ost, 3) > 0)
10341 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10342 exp, fndecl);
10343 expand_builtin_trap ();
10344 return const0_rtx;
10347 object_size_type = tree_to_shwi (ost);
10349 return object_size_type < 2 ? constm1_rtx : const0_rtx;
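/* The fallback above implements the documented defaults when the size
   is not known at expansion time: types 0 and 1 yield (size_t) -1,
   types 2 and 3 yield 0.  The cases that can be resolved are handled
   by fold_builtin_object_size further below.  */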
10352 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10353 FCODE is the BUILT_IN_* to use.
10354 Return NULL_RTX if we failed; the caller should emit a normal call,
10355 otherwise try to get the result in TARGET, if convenient (and in
10356 mode MODE if that's convenient). */
10358 static rtx
10359 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10360 enum built_in_function fcode)
10362 if (!validate_arglist (exp,
10363 POINTER_TYPE,
10364 fcode == BUILT_IN_MEMSET_CHK
10365 ? INTEGER_TYPE : POINTER_TYPE,
10366 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10367 return NULL_RTX;
10369 tree dest = CALL_EXPR_ARG (exp, 0);
10370 tree src = CALL_EXPR_ARG (exp, 1);
10371 tree len = CALL_EXPR_ARG (exp, 2);
10372 tree size = CALL_EXPR_ARG (exp, 3);
10374 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10375 /*str=*/NULL_TREE, size);
10377 if (!tree_fits_uhwi_p (size))
10378 return NULL_RTX;
10380 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10382 /* Avoid transforming the checking call to an ordinary one when
10383 an overflow has been detected or when the call couldn't be
10384 validated because the size is not constant. */
10385 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10386 return NULL_RTX;
10388 tree fn = NULL_TREE;
10389 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10390 mem{cpy,pcpy,move,set} is available. */
10391 switch (fcode)
10393 case BUILT_IN_MEMCPY_CHK:
10394 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10395 break;
10396 case BUILT_IN_MEMPCPY_CHK:
10397 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10398 break;
10399 case BUILT_IN_MEMMOVE_CHK:
10400 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10401 break;
10402 case BUILT_IN_MEMSET_CHK:
10403 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10404 break;
10405 default:
10406 break;
10409 if (! fn)
10410 return NULL_RTX;
10412 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10413 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10414 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10415 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10417 else if (fcode == BUILT_IN_MEMSET_CHK)
10418 return NULL_RTX;
10419 else
10421 unsigned int dest_align = get_pointer_alignment (dest);
10423 /* If DEST is not a pointer type, call the normal function. */
10424 if (dest_align == 0)
10425 return NULL_RTX;
10427 /* If SRC and DEST are the same (and not volatile), do nothing. */
10428 if (operand_equal_p (src, dest, 0))
10430 tree expr;
10432 if (fcode != BUILT_IN_MEMPCPY_CHK)
10434 /* Evaluate and ignore LEN in case it has side-effects. */
10435 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10436 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10439 expr = fold_build_pointer_plus (dest, len);
10440 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10443 /* __memmove_chk special case. */
10444 if (fcode == BUILT_IN_MEMMOVE_CHK)
10446 unsigned int src_align = get_pointer_alignment (src);
10448 if (src_align == 0)
10449 return NULL_RTX;
10451 /* If src is categorized for a readonly section we can use
10452 normal __memcpy_chk. */
10453 if (readonly_data_expr (src))
10455 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10456 if (!fn)
10457 return NULL_RTX;
10458 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10459 dest, src, len, size);
10460 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10461 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10462 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10465 return NULL_RTX;
10469 /* Emit warning if a buffer overflow is detected at compile time. */
10471 static void
10472 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10474 /* The source string. */
10475 tree srcstr = NULL_TREE;
10476 /* The size of the destination object. */
10477 tree objsize = NULL_TREE;
10478 /* The string that is being concatenated with (as in __strcat_chk)
10479 or null if it isn't. */
10480 tree catstr = NULL_TREE;
10481 /* The maximum length of the source sequence in a bounded operation
10482 (such as __strncat_chk) or null if the operation isn't bounded
10483 (such as __strcat_chk). */
10484 tree maxread = NULL_TREE;
10485 /* The exact size of the access (such as in __strncpy_chk). */
10486 tree size = NULL_TREE;
10488 switch (fcode)
10490 case BUILT_IN_STRCPY_CHK:
10491 case BUILT_IN_STPCPY_CHK:
10492 srcstr = CALL_EXPR_ARG (exp, 1);
10493 objsize = CALL_EXPR_ARG (exp, 2);
10494 break;
10496 case BUILT_IN_STRCAT_CHK:
10497 /* For __strcat_chk the warning will be emitted only if overflowing
10498 by at least strlen (dest) + 1 bytes. */
10499 catstr = CALL_EXPR_ARG (exp, 0);
10500 srcstr = CALL_EXPR_ARG (exp, 1);
10501 objsize = CALL_EXPR_ARG (exp, 2);
10502 break;
10504 case BUILT_IN_STRNCAT_CHK:
10505 catstr = CALL_EXPR_ARG (exp, 0);
10506 srcstr = CALL_EXPR_ARG (exp, 1);
10507 maxread = CALL_EXPR_ARG (exp, 2);
10508 objsize = CALL_EXPR_ARG (exp, 3);
10509 break;
10511 case BUILT_IN_STRNCPY_CHK:
10512 case BUILT_IN_STPNCPY_CHK:
10513 srcstr = CALL_EXPR_ARG (exp, 1);
10514 size = CALL_EXPR_ARG (exp, 2);
10515 objsize = CALL_EXPR_ARG (exp, 3);
10516 break;
10518 case BUILT_IN_SNPRINTF_CHK:
10519 case BUILT_IN_VSNPRINTF_CHK:
10520 maxread = CALL_EXPR_ARG (exp, 1);
10521 objsize = CALL_EXPR_ARG (exp, 3);
10522 break;
10523 default:
10524 gcc_unreachable ();
10527 if (catstr && maxread)
10529 /* Check __strncat_chk. There is no way to determine the length
10530 of the string to which the source string is being appended so
10531 just warn when the length of the source string is not known. */
10532 check_strncat_sizes (exp, objsize);
10533 return;
10536 /* The destination argument is the first one for all built-ins above. */
10537 tree dst = CALL_EXPR_ARG (exp, 0);
10539 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10542 /* Emit warning if a buffer overflow is detected at compile time
10543 in __sprintf_chk/__vsprintf_chk calls. */
10545 static void
10546 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10548 tree size, len, fmt;
10549 const char *fmt_str;
10550 int nargs = call_expr_nargs (exp);
10552 /* Verify the required arguments in the original call. */
10554 if (nargs < 4)
10555 return;
10556 size = CALL_EXPR_ARG (exp, 2);
10557 fmt = CALL_EXPR_ARG (exp, 3);
10559 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10560 return;
10562 /* Check whether the format is a literal string constant. */
10563 fmt_str = c_getstr (fmt);
10564 if (fmt_str == NULL)
10565 return;
10567 if (!init_target_chars ())
10568 return;
10570 /* If the format doesn't contain % args or %%, we know its size. */
10571 if (strchr (fmt_str, target_percent) == 0)
10572 len = build_int_cstu (size_type_node, strlen (fmt_str));
10573 /* If the format is "%s" and first ... argument is a string literal,
10574 we know it too. */
10575 else if (fcode == BUILT_IN_SPRINTF_CHK
10576 && strcmp (fmt_str, target_percent_s) == 0)
10578 tree arg;
10580 if (nargs < 5)
10581 return;
10582 arg = CALL_EXPR_ARG (exp, 4);
10583 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10584 return;
10586 len = c_strlen (arg, 1);
10587 if (!len || ! tree_fits_uhwi_p (len))
10588 return;
10590 else
10591 return;
10593 /* Add one for the terminating nul. */
10594 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10596 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10597 /*maxread=*/NULL_TREE, len, size);
10600 /* Emit warning if a free is called with address of a variable. */
10602 static void
10603 maybe_emit_free_warning (tree exp)
10605 if (call_expr_nargs (exp) != 1)
10606 return;
10608 tree arg = CALL_EXPR_ARG (exp, 0);
10610 STRIP_NOPS (arg);
10611 if (TREE_CODE (arg) != ADDR_EXPR)
10612 return;
10614 arg = get_base_address (TREE_OPERAND (arg, 0));
10615 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10616 return;
10618 if (SSA_VAR_P (arg))
10619 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10620 "%Kattempt to free a non-heap object %qD", exp, arg);
10621 else
10622 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10623 "%Kattempt to free a non-heap object", exp);
10626 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10627 if possible. */
10629 static tree
10630 fold_builtin_object_size (tree ptr, tree ost)
10632 unsigned HOST_WIDE_INT bytes;
10633 int object_size_type;
10635 if (!validate_arg (ptr, POINTER_TYPE)
10636 || !validate_arg (ost, INTEGER_TYPE))
10637 return NULL_TREE;
10639 STRIP_NOPS (ost);
10641 if (TREE_CODE (ost) != INTEGER_CST
10642 || tree_int_cst_sgn (ost) < 0
10643 || compare_tree_int (ost, 3) > 0)
10644 return NULL_TREE;
10646 object_size_type = tree_to_shwi (ost);
10648 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10649 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10650 and (size_t) 0 for types 2 and 3. */
10651 if (TREE_SIDE_EFFECTS (ptr))
10652 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10654 if (TREE_CODE (ptr) == ADDR_EXPR)
10656 compute_builtin_object_size (ptr, object_size_type, &bytes);
10657 if (wi::fits_to_tree_p (bytes, size_type_node))
10658 return build_int_cstu (size_type_node, bytes);
10660 else if (TREE_CODE (ptr) == SSA_NAME)
10662 /* If object size is not known yet, delay folding until
10663 later. Maybe subsequent passes will help determining
10664 it. */
10665 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10666 && wi::fits_to_tree_p (bytes, size_type_node))
10667 return build_int_cstu (size_type_node, bytes);
10670 return NULL_TREE;
10673 /* Builtins with folding operations that operate on "..." arguments
10674 need special handling; we need to store the arguments in a convenient
10675 data structure before attempting any folding. Fortunately there are
10676 only a few builtins that fall into this category. FNDECL is the
10677 function; the NARGS arguments to fold are passed in the array ARGS. */
10679 static tree
10680 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10682 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10683 tree ret = NULL_TREE;
10685 switch (fcode)
10687 case BUILT_IN_FPCLASSIFY:
10688 ret = fold_builtin_fpclassify (loc, args, nargs);
10689 break;
10691 default:
10692 break;
10694 if (ret)
10696 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10697 SET_EXPR_LOCATION (ret, loc);
10698 TREE_NO_WARNING (ret) = 1;
10699 return ret;
10701 return NULL_TREE;
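/* For example (illustrative): a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 1.0)

   passes six "..." arguments; when the classified operand is a
   constant, the fold above reduces the whole call to the matching
   classification value (here FP_NORMAL).  */
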
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

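/* The strings built above are the target-charset spellings of "%c",
   "%s" and "%s\n"; e.g. the sprintf_chk handling earlier compares a
   format string against target_percent_s to recognize a plain "%s"
   format even when the host and target charsets differ.  */
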
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

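/* For example (illustrative): lgamma (1.0e308) is roughly 7.0e310,
   which MPFR represents exactly but which exceeds DBL_MAX; the
   conversion checks above then reject the value and NULL_TREE is
   returned, leaving the call to be evaluated at run time.  */
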
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

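/* For example (illustrative): folding

     int q;
     double r = __builtin_remquo (3.0, 2.0, &q);

   with constant arguments yields r == -1.0 and q == 2, since 3/2
   rounded to the nearest integer (ties to even) is 2, and
   3.0 - 2 * 2.0 == -1.0.  */
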
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}

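/* For example (illustrative): folding

     int sg;
     double l = lgamma_r (-0.5, &sg);

   with a constant argument yields l == log (2 * sqrt (pi)), about
   1.2655, and sg == -1, since gamma (-0.5) == -2 * sqrt (pi) is
   negative.  */
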
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

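/* For example (illustrative): a call such as cpow (z0, z1) with
   complex-constant operands can be folded through this helper by
   passing mpc_pow as FUNC: both operands are converted to mpc_t
   values at the target precision, and the rounded result is turned
   back into a COMPLEX_CST if do_mpc_ckconv accepts it.  */
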
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

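/* For example (illustrative): a declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   reaches this function with ASMSPEC == "my_ffs", so both the builtin
   decl and, on targets where int is narrower than a word, the ffs
   libfunc and optab entry are redirected to that name.  */
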
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
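
/* For example (illustrative): when folding memchr (s, 'a', n) against
   a known string constant, the character argument arrives as an
   INTEGER_CST; target_char_cst_p extracts it into a host char so the
   folder can search the constant, and refuses to fold when the host
   and target char widths differ.  */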