Turn off ipa-ra in builtins test (PR91059)
[official-gcc.git] / gcc / builtins.c
blob: e2ba356c0d396ce2f6f10237fc5e98b699c8972c
/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
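
/* Illustrative sketch, not part of the GCC sources: how the reserved-name
   check above behaves on a few sample identifiers.  Wrapped in #if 0 so it
   serves as documentation only.  */
#if 0
static void
example_is_builtin_name (void)
{
  gcc_checking_assert (is_builtin_name ("__builtin_memcpy"));
  gcc_checking_assert (is_builtin_name ("__sync_fetch_and_add_4"));
  gcc_checking_assert (is_builtin_name ("__atomic_load_8"));
  /* A plain library name does not qualify for unconditional expansion.  */
  gcc_checking_assert (!is_builtin_name ("memcpy"));
}
#endif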
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
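
/* Illustrative sketch, not part of the GCC sources: if the analysis above
   proves ptr % 16 == 4 in bytes, then ALIGN is 128 bits, BITPOS is 32 bits,
   and the strongest alignment that always holds is least_bit_hwi (32) == 32
   bits, i.e. 4 bytes.  Wrapped in #if 0 so it is documentation only.  */
#if 0
static void
example_object_alignment (void)
{
  unsigned int align = 128;		/* 16 bytes, in bits.  */
  unsigned HOST_WIDE_INT bitpos = 32;	/* 4 bytes, in bits.  */
  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  gcc_checking_assert (align == 32);
}
#endif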
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */
bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
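
/* Illustrative sketch, not part of the GCC sources: string_length counts
   elements before the first NUL, capped at MAXELTS.  Wrapped in #if 0 so
   it is documentation only.  */
#if 0
static void
example_string_length (void)
{
  /* Plain char with an embedded NUL: stops at index 3.  */
  gcc_checking_assert (string_length ("foo\0bar", 1, 8) == 3);
  /* No NUL among the first MAXELTS elements: returns MAXELTS.  */
  gcc_checking_assert (string_length ("abcd", 1, 4) == 4);
}
#endif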
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic because ARG refers to an object declared at DECL
   that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */
tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	TREE_NO_WARNING (src) = 1;
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
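
/* Illustrative sketch, not part of the GCC sources: for the address of a
   NUL-terminated string constant, c_strlen folds to the compile-time
   length; build_string_literal yields such an address.  Wrapped in #if 0
   so it is documentation only.  */
#if 0
static void
example_c_strlen (void)
{
  /* The literal "foo" occupies 4 bytes including the terminating NUL.  */
  tree src = build_string_literal (4, "foo");
  tree len = c_strlen (src, 1);
  gcc_checking_assert (len && tree_int_cst_equal (len, ssize_int (3)));
}
#endif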
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
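
/* Illustrative sketch, not part of the GCC sources: on a little-endian,
   non-word-swapping target, the four bytes of "abcd" pack into the SImode
   constant 0x64636261 ('a' in the low byte).  Wrapped in #if 0 so it is
   documentation only.  */
#if 0
static void
example_c_readstr (void)
{
  rtx x = c_readstr ("abcd", SImode);
  /* CONST_INTs are shared, so pointer equality is meaningful here.  */
  gcc_checking_assert (x == gen_int_mode (0x64636261, SImode));
}
#endif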
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
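
/* Illustrative sketch, not part of the GCC sources: note the inverted
   convention above - zero means success, nonzero means the constant did
   not fit in the host char.  Wrapped in #if 0 so it is documentation
   only.  */
#if 0
static void
example_target_char_cast (void)
{
  char c;
  tree cst = build_int_cst (char_type_node, 'A');
  if (target_char_cast (cst, &c) == 0)
    gcc_checking_assert (c == 'A');
}
#endif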
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if the CALL_EXPR argument iterator ITER has more
   arguments left to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
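
/* Illustrative sketch, not part of the GCC sources: checking that EXP was
   called as f (void *, int, ...) - the trailing 0 stands for an ellipsis,
   whereas VOID_TYPE would demand that no further arguments follow.
   Wrapped in #if 0 so it is documentation only.  */
#if 0
static bool
example_validate_arglist (const_tree exp)
{
  return validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, 0);
}
#endif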
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
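
/* Illustrative sketch, not part of the GCC sources: the rounding step used
   in the size loops above.  With a running offset of 6 and a 4-byte-aligned
   4-byte mode, the slot is placed at offset 8 and the new size is 12.
   Wrapped in #if 0 so it is documentation only.  */
#if 0
static void
example_round_slot (void)
{
  int size = 6, align = 4, mode_size = 4;
  if (size % align != 0)
    size = CEIL (size, align) * align;
  gcc_checking_assert (size == 8);
  size += mode_size;
  gcc_checking_assert (size == 12);
}
#endif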
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1648 /* __builtin_apply_args returns block of memory allocated on
1649 the stack into which is stored the arg pointer, structure
1650 value address, static chain, and all the registers that might
1651 possibly be used in performing a function call. The code is
1652 moved to the start of the function so the incoming values are
1653 saved. */
1655 static rtx
1656 expand_builtin_apply_args (void)
1658 /* Don't do __builtin_apply_args more than once in a function.
1659 Save the result of the first call and reuse it. */
1660 if (apply_args_value != 0)
1661 return apply_args_value;
1663 /* When this function is called, it means that registers must be
1664 saved on entry to this function. So we migrate the
1665 call to the first insn of this function. */
1666 rtx temp;
1668 start_sequence ();
1669 temp = expand_builtin_apply_args_1 ();
1670 rtx_insn *seq = get_insns ();
1671 end_sequence ();
1673 apply_args_value = temp;
1675 /* Put the insns after the NOTE that starts the function.
1676 If this is inside a start_sequence, make the outer-level insn
1677 chain current, so the code is placed at the start of the
1678 function. If internal_arg_pointer is a non-virtual pseudo,
1679 it needs to be placed after the function that initializes
1680 that pseudo. */
1681 push_topmost_sequence ();
1682 if (REG_P (crtl->args.internal_arg_pointer)
1683 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1684 emit_insn_before (seq, parm_birth_insn);
1685 else
1686 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1687 pop_topmost_sequence ();
1688 return temp;
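/* A minimal user-level sketch of how the three builtins chain
   together; TARGET_FN and the 64-byte argument-block size are
   illustrative assumptions, not taken from this file:

     extern int target_fn ();

     void *
     forward (int a, int b)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   __builtin_apply and __builtin_return are expanded by
   expand_builtin_apply and expand_builtin_return below. */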
1692 /* Perform an untyped call and save the state required to perform an
1693 untyped return of whatever value was returned by the given function. */
1695 static rtx
1696 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1698 int size, align, regno;
1699 fixed_size_mode mode;
1700 rtx incoming_args, result, reg, dest, src;
1701 rtx_call_insn *call_insn;
1702 rtx old_stack_level = 0;
1703 rtx call_fusage = 0;
1704 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1706 arguments = convert_memory_address (Pmode, arguments);
1708 /* Create a block where the return registers can be saved. */
1709 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1711 /* Fetch the arg pointer from the ARGUMENTS block. */
1712 incoming_args = gen_reg_rtx (Pmode);
1713 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1714 if (!STACK_GROWS_DOWNWARD)
1715 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1716 incoming_args, 0, OPTAB_LIB_WIDEN);
1718 /* Push a new argument block and copy the arguments. Do not allow
1719 the (potential) memcpy call below to interfere with our stack
1720 manipulations. */
1721 do_pending_stack_adjust ();
1722 NO_DEFER_POP;
1724 /* Save the stack with nonlocal if available. */
1725 if (targetm.have_save_stack_nonlocal ())
1726 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1727 else
1728 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1730 /* Allocate a block of memory onto the stack and copy the memory
1731 arguments to the outgoing arguments address. We can pass TRUE
1732 as the last argument because we just saved the stack pointer
1733 and will restore it right after the call. */
1734 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1736 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1737 may have already set current_function_calls_alloca to true.
1738 current_function_calls_alloca won't be set if argsize is zero,
1739 so we have to guarantee need_drap is true here. */
1740 if (SUPPORTS_STACK_ALIGNMENT)
1741 crtl->need_drap = true;
1743 dest = virtual_outgoing_args_rtx;
1744 if (!STACK_GROWS_DOWNWARD)
1746 if (CONST_INT_P (argsize))
1747 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1748 else
1749 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1751 dest = gen_rtx_MEM (BLKmode, dest);
1752 set_mem_align (dest, PARM_BOUNDARY);
1753 src = gen_rtx_MEM (BLKmode, incoming_args);
1754 set_mem_align (src, PARM_BOUNDARY);
1755 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1757 /* Refer to the argument block. */
1758 apply_args_size ();
1759 arguments = gen_rtx_MEM (BLKmode, arguments);
1760 set_mem_align (arguments, PARM_BOUNDARY);
1762 /* Walk past the arg-pointer and structure value address. */
1763 size = GET_MODE_SIZE (Pmode);
1764 if (struct_value)
1765 size += GET_MODE_SIZE (Pmode);
1767 /* Restore each of the registers previously saved. Make USE insns
1768 for each of these registers for use in making the call. */
1769 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1770 if ((mode = apply_args_mode[regno]) != VOIDmode)
1772 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1773 if (size % align != 0)
1774 size = CEIL (size, align) * align;
1775 reg = gen_rtx_REG (mode, regno);
1776 emit_move_insn (reg, adjust_address (arguments, mode, size));
1777 use_reg (&call_fusage, reg);
1778 size += GET_MODE_SIZE (mode);
1781 /* Restore the structure value address unless this is passed as an
1782 "invisible" first argument. */
1783 size = GET_MODE_SIZE (Pmode);
1784 if (struct_value)
1786 rtx value = gen_reg_rtx (Pmode);
1787 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1788 emit_move_insn (struct_value, value);
1789 if (REG_P (struct_value))
1790 use_reg (&call_fusage, struct_value);
1793 /* All arguments and registers used for the call are set up by now! */
1794 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1796 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1797 and we don't want to load it into a register as an optimization,
1798 because prepare_call_address already did it if it should be done. */
1799 if (GET_CODE (function) != SYMBOL_REF)
1800 function = memory_address (FUNCTION_MODE, function);
1802 /* Generate the actual call instruction and save the return value. */
1803 if (targetm.have_untyped_call ())
1805 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1806 emit_call_insn (targetm.gen_untyped_call (mem, result,
1807 result_vector (1, result)));
1809 else if (targetm.have_call_value ())
1811 rtx valreg = 0;
1813 /* Locate the unique return register. It is not possible to
1814 express a call that sets more than one return register using
1815 call_value; use untyped_call for that. In fact, untyped_call
1816 only needs to save the return registers in the given block. */
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 if ((mode = apply_result_mode[regno]) != VOIDmode)
1820 gcc_assert (!valreg); /* have_untyped_call required. */
1822 valreg = gen_rtx_REG (mode, regno);
1825 emit_insn (targetm.gen_call_value (valreg,
1826 gen_rtx_MEM (FUNCTION_MODE, function),
1827 const0_rtx, NULL_RTX, const0_rtx));
1829 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1831 else
1832 gcc_unreachable ();
1834 /* Find the CALL insn we just emitted, and attach the register usage
1835 information. */
1836 call_insn = last_call_insn ();
1837 add_function_usage_to (call_insn, call_fusage);
1839 /* Restore the stack. */
1840 if (targetm.have_save_stack_nonlocal ())
1841 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1842 else
1843 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1844 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1846 OK_DEFER_POP;
1848 /* Return the address of the result block. */
1849 result = copy_addr_to_reg (XEXP (result, 0));
1850 return convert_memory_address (ptr_mode, result);
1853 /* Perform an untyped return. */
1855 static void
1856 expand_builtin_return (rtx result)
1858 int size, align, regno;
1859 fixed_size_mode mode;
1860 rtx reg;
1861 rtx_insn *call_fusage = 0;
1863 result = convert_memory_address (Pmode, result);
1865 apply_result_size ();
1866 result = gen_rtx_MEM (BLKmode, result);
1868 if (targetm.have_untyped_return ())
1870 rtx vector = result_vector (0, result);
1871 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1872 emit_barrier ();
1873 return;
1876 /* Restore the return value and note that each value is used. */
1877 size = 0;
1878 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1879 if ((mode = apply_result_mode[regno]) != VOIDmode)
1881 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1882 if (size % align != 0)
1883 size = CEIL (size, align) * align;
1884 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1885 emit_move_insn (reg, adjust_address (result, mode, size));
1887 push_to_sequence (call_fusage);
1888 emit_use (reg);
1889 call_fusage = get_insns ();
1890 end_sequence ();
1891 size += GET_MODE_SIZE (mode);
1894 /* Put the USE insns before the return. */
1895 emit_insn (call_fusage);
1897 /* Return whatever values were restored by jumping directly to the end
1898 of the function. */
1899 expand_naked_return ();
1902 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1904 static enum type_class
1905 type_to_class (tree type)
1907 switch (TREE_CODE (type))
1909 case VOID_TYPE: return void_type_class;
1910 case INTEGER_TYPE: return integer_type_class;
1911 case ENUMERAL_TYPE: return enumeral_type_class;
1912 case BOOLEAN_TYPE: return boolean_type_class;
1913 case POINTER_TYPE: return pointer_type_class;
1914 case REFERENCE_TYPE: return reference_type_class;
1915 case OFFSET_TYPE: return offset_type_class;
1916 case REAL_TYPE: return real_type_class;
1917 case COMPLEX_TYPE: return complex_type_class;
1918 case FUNCTION_TYPE: return function_type_class;
1919 case METHOD_TYPE: return method_type_class;
1920 case RECORD_TYPE: return record_type_class;
1921 case UNION_TYPE:
1922 case QUAL_UNION_TYPE: return union_type_class;
1923 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1924 ? string_type_class : array_type_class);
1925 case LANG_TYPE: return lang_type_class;
1926 default: return no_type_class;
1930 /* Expand a call EXP to __builtin_classify_type. */
1932 static rtx
1933 expand_builtin_classify_type (tree exp)
1935 if (call_expr_nargs (exp))
1936 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1937 return GEN_INT (no_type_class);
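/* A minimal user-level sketch; the enumerator values come from the
   type_class enum in typeclass.h, not from this file:

     int r = __builtin_classify_type (1.0);  // real_type_class
     int i = __builtin_classify_type (1);    // integer_type_class

   Both fold to integer constants at compile time, which is what makes
   the builtin usable in <tgmath.h>-style dispatch macros. */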
1940 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1941 determines which among a set of builtin math functions is appropriate
1942 for a given type mode. The `F' (float) and `L' (long double) variants
1943 are automatically generated from the `double' case. If a function
1944 supports the _Float<N> and _Float<N>X types, additional variants with
1945 the `F32', `F64', `F128', etc. suffixes are considered as well. */
1946 #define CASE_MATHFN(MATHFN) \
1947 CASE_CFN_##MATHFN: \
1948 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1949 fcodel = BUILT_IN_##MATHFN##L ; break;
1950 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1951 types. */
1952 #define CASE_MATHFN_FLOATN(MATHFN) \
1953 CASE_CFN_##MATHFN: \
1954 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1955 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1956 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1957 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1958 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1959 break;
1960 /* Similar to above, but appends _R after any F/L suffix. */
1961 #define CASE_MATHFN_REENT(MATHFN) \
1962 case CFN_BUILT_IN_##MATHFN##_R: \
1963 case CFN_BUILT_IN_##MATHFN##F_R: \
1964 case CFN_BUILT_IN_##MATHFN##L_R: \
1965 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1966 fcodel = BUILT_IN_##MATHFN##L_R ; break;
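/* For instance, CASE_MATHFN (SIN) expands mechanically to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   where CASE_CFN_SIN is itself a generated macro (case-cfn-macros.h)
   providing the case labels for the sin combined_fn codes, so a single
   table entry maps every variant back to its built_in_function. */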
1968 /* Return a function equivalent to FN but operating on floating-point
1969 values of type TYPE, or END_BUILTINS if no such function exists.
1970 This is purely an operation on function codes; it does not guarantee
1971 that the target actually has an implementation of the function. */
1973 static built_in_function
1974 mathfn_built_in_2 (tree type, combined_fn fn)
1976 tree mtype;
1977 built_in_function fcode, fcodef, fcodel;
1978 built_in_function fcodef16 = END_BUILTINS;
1979 built_in_function fcodef32 = END_BUILTINS;
1980 built_in_function fcodef64 = END_BUILTINS;
1981 built_in_function fcodef128 = END_BUILTINS;
1982 built_in_function fcodef32x = END_BUILTINS;
1983 built_in_function fcodef64x = END_BUILTINS;
1984 built_in_function fcodef128x = END_BUILTINS;
1986 switch (fn)
1988 CASE_MATHFN (ACOS)
1989 CASE_MATHFN (ACOSH)
1990 CASE_MATHFN (ASIN)
1991 CASE_MATHFN (ASINH)
1992 CASE_MATHFN (ATAN)
1993 CASE_MATHFN (ATAN2)
1994 CASE_MATHFN (ATANH)
1995 CASE_MATHFN (CBRT)
1996 CASE_MATHFN_FLOATN (CEIL)
1997 CASE_MATHFN (CEXPI)
1998 CASE_MATHFN_FLOATN (COPYSIGN)
1999 CASE_MATHFN (COS)
2000 CASE_MATHFN (COSH)
2001 CASE_MATHFN (DREM)
2002 CASE_MATHFN (ERF)
2003 CASE_MATHFN (ERFC)
2004 CASE_MATHFN (EXP)
2005 CASE_MATHFN (EXP10)
2006 CASE_MATHFN (EXP2)
2007 CASE_MATHFN (EXPM1)
2008 CASE_MATHFN (FABS)
2009 CASE_MATHFN (FDIM)
2010 CASE_MATHFN_FLOATN (FLOOR)
2011 CASE_MATHFN_FLOATN (FMA)
2012 CASE_MATHFN_FLOATN (FMAX)
2013 CASE_MATHFN_FLOATN (FMIN)
2014 CASE_MATHFN (FMOD)
2015 CASE_MATHFN (FREXP)
2016 CASE_MATHFN (GAMMA)
2017 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2018 CASE_MATHFN (HUGE_VAL)
2019 CASE_MATHFN (HYPOT)
2020 CASE_MATHFN (ILOGB)
2021 CASE_MATHFN (ICEIL)
2022 CASE_MATHFN (IFLOOR)
2023 CASE_MATHFN (INF)
2024 CASE_MATHFN (IRINT)
2025 CASE_MATHFN (IROUND)
2026 CASE_MATHFN (ISINF)
2027 CASE_MATHFN (J0)
2028 CASE_MATHFN (J1)
2029 CASE_MATHFN (JN)
2030 CASE_MATHFN (LCEIL)
2031 CASE_MATHFN (LDEXP)
2032 CASE_MATHFN (LFLOOR)
2033 CASE_MATHFN (LGAMMA)
2034 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2035 CASE_MATHFN (LLCEIL)
2036 CASE_MATHFN (LLFLOOR)
2037 CASE_MATHFN (LLRINT)
2038 CASE_MATHFN (LLROUND)
2039 CASE_MATHFN (LOG)
2040 CASE_MATHFN (LOG10)
2041 CASE_MATHFN (LOG1P)
2042 CASE_MATHFN (LOG2)
2043 CASE_MATHFN (LOGB)
2044 CASE_MATHFN (LRINT)
2045 CASE_MATHFN (LROUND)
2046 CASE_MATHFN (MODF)
2047 CASE_MATHFN (NAN)
2048 CASE_MATHFN (NANS)
2049 CASE_MATHFN_FLOATN (NEARBYINT)
2050 CASE_MATHFN (NEXTAFTER)
2051 CASE_MATHFN (NEXTTOWARD)
2052 CASE_MATHFN (POW)
2053 CASE_MATHFN (POWI)
2054 CASE_MATHFN (POW10)
2055 CASE_MATHFN (REMAINDER)
2056 CASE_MATHFN (REMQUO)
2057 CASE_MATHFN_FLOATN (RINT)
2058 CASE_MATHFN_FLOATN (ROUND)
2059 CASE_MATHFN (SCALB)
2060 CASE_MATHFN (SCALBLN)
2061 CASE_MATHFN (SCALBN)
2062 CASE_MATHFN (SIGNBIT)
2063 CASE_MATHFN (SIGNIFICAND)
2064 CASE_MATHFN (SIN)
2065 CASE_MATHFN (SINCOS)
2066 CASE_MATHFN (SINH)
2067 CASE_MATHFN_FLOATN (SQRT)
2068 CASE_MATHFN (TAN)
2069 CASE_MATHFN (TANH)
2070 CASE_MATHFN (TGAMMA)
2071 CASE_MATHFN_FLOATN (TRUNC)
2072 CASE_MATHFN (Y0)
2073 CASE_MATHFN (Y1)
2074 CASE_MATHFN (YN)
2076 default:
2077 return END_BUILTINS;
2080 mtype = TYPE_MAIN_VARIANT (type);
2081 if (mtype == double_type_node)
2082 return fcode;
2083 else if (mtype == float_type_node)
2084 return fcodef;
2085 else if (mtype == long_double_type_node)
2086 return fcodel;
2087 else if (mtype == float16_type_node)
2088 return fcodef16;
2089 else if (mtype == float32_type_node)
2090 return fcodef32;
2091 else if (mtype == float64_type_node)
2092 return fcodef64;
2093 else if (mtype == float128_type_node)
2094 return fcodef128;
2095 else if (mtype == float32x_type_node)
2096 return fcodef32x;
2097 else if (mtype == float64x_type_node)
2098 return fcodef64x;
2099 else if (mtype == float128x_type_node)
2100 return fcodef128x;
2101 else
2102 return END_BUILTINS;
2105 /* Return the math function equivalent to FN but operating directly on
2106 TYPE, if available. If IMPLICIT_P is true use the implicit builtin
2107 declaration, otherwise use the explicit declaration. If we can't do the
2108 conversion, return null. */
2110 static tree
2111 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2113 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2114 if (fcode2 == END_BUILTINS)
2115 return NULL_TREE;
2117 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2118 return NULL_TREE;
2120 return builtin_decl_explicit (fcode2);
2123 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2125 tree
2126 mathfn_built_in (tree type, combined_fn fn)
2128 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2131 /* Like mathfn_built_in_1, but take a built_in_function and
2132 always use the implicit builtin declarations. */
2134 tree
2135 mathfn_built_in (tree type, enum built_in_function fn)
2137 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
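/* A short usage sketch (this exact call is illustrative, not taken
   from this file): retype a double math function for a float operand.

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_COS);
     // yields the decl for BUILT_IN_COSF, or NULL_TREE if cosf is
     // not an implicitly available builtin

   This saves callers from hard-coding the F/L/F<N> suffix logic. */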
2140 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2141 return its code, otherwise return IFN_LAST. Note that this function
2142 only tests whether the function is defined in internals.def, not whether
2143 it is actually available on the target. */
2145 internal_fn
2146 associated_internal_fn (tree fndecl)
2148 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2149 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2150 switch (DECL_FUNCTION_CODE (fndecl))
2152 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2153 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2154 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2155 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2156 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2157 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2158 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2159 #include "internal-fn.def"
2161 CASE_FLT_FN (BUILT_IN_POW10):
2162 return IFN_EXP10;
2164 CASE_FLT_FN (BUILT_IN_DREM):
2165 return IFN_REMAINDER;
2167 CASE_FLT_FN (BUILT_IN_SCALBN):
2168 CASE_FLT_FN (BUILT_IN_SCALBLN):
2169 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2170 return IFN_LDEXP;
2171 return IFN_LAST;
2173 default:
2174 return IFN_LAST;
2178 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2179 on the current target by a call to an internal function, return the
2180 code of that internal function, otherwise return IFN_LAST. The caller
2181 is responsible for ensuring that any side-effects of the built-in
2182 call are dealt with correctly. E.g. if CALL sets errno, the caller
2183 must decide that the errno result isn't needed or make it available
2184 in some other way. */
2186 internal_fn
2187 replacement_internal_fn (gcall *call)
2189 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2191 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2192 if (ifn != IFN_LAST)
2194 tree_pair types = direct_internal_fn_types (ifn, call);
2195 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2196 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2197 return ifn;
2200 return IFN_LAST;
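/* A hedged sketch of a typical caller; STMT is assumed to be a gcall
   obtained while walking the IL, and the rewrite step is elided:

     internal_fn ifn = replacement_internal_fn (stmt);
     if (ifn != IFN_LAST)
       {
         // safe to rewrite the call to use IFN directly, e.g. so it
         // expands via its optab instead of a libcall
       }

   Per the comment above, the caller still owns any errno side-effects
   of the original built-in call. */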
2203 /* Expand a call to the builtin ternary math functions (fma).
2204 Return NULL_RTX if a normal call should be emitted rather than expanding the
2205 function in-line. EXP is the expression that is a call to the builtin
2206 function; if convenient, the result should be placed in TARGET.
2207 SUBTARGET may be used as the target for computing one of EXP's
2208 operands. */
2210 static rtx
2211 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2213 optab builtin_optab;
2214 rtx op0, op1, op2, result;
2215 rtx_insn *insns;
2216 tree fndecl = get_callee_fndecl (exp);
2217 tree arg0, arg1, arg2;
2218 machine_mode mode;
2220 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2221 return NULL_RTX;
2223 arg0 = CALL_EXPR_ARG (exp, 0);
2224 arg1 = CALL_EXPR_ARG (exp, 1);
2225 arg2 = CALL_EXPR_ARG (exp, 2);
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_FMA):
2230 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2231 builtin_optab = fma_optab; break;
2232 default:
2233 gcc_unreachable ();
2236 /* Make a suitable register to place result in. */
2237 mode = TYPE_MODE (TREE_TYPE (exp));
2239 /* Before working hard, check whether the instruction is available. */
2240 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2241 return NULL_RTX;
2243 result = gen_reg_rtx (mode);
2245 /* Always stabilize the argument list. */
2246 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2247 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2248 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2250 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2251 op1 = expand_normal (arg1);
2252 op2 = expand_normal (arg2);
2254 start_sequence ();
2256 /* Compute into RESULT.
2257 Set RESULT to wherever the result comes back. */
2258 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2259 result, 0);
2261 /* If we were unable to expand via the builtin, stop the sequence
2262 (without outputting the insns) and call to the library function
2263 with the stabilized argument list. */
2264 if (result == 0)
2266 end_sequence ();
2267 return expand_call (exp, target, target == const0_rtx);
2270 /* Output the entire sequence. */
2271 insns = get_insns ();
2272 end_sequence ();
2273 emit_insn (insns);
2275 return result;
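/* A user-level view of the expansion above, assuming the target
   provides fma_optab for DFmode (for example a hardware fused
   multiply-add):

     double r = __builtin_fma (a, b, c);   // one fused instruction

   When the optab is missing, the NULL_RTX return makes the caller
   emit a normal library call to fma instead. */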
2278 /* Expand a call to the builtin sin and cos math functions.
2279 Return NULL_RTX if a normal call should be emitted rather than expanding the
2280 function in-line. EXP is the expression that is a call to the builtin
2281 function; if convenient, the result should be placed in TARGET.
2282 SUBTARGET may be used as the target for computing one of EXP's
2283 operands. */
2285 static rtx
2286 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2288 optab builtin_optab;
2289 rtx op0;
2290 rtx_insn *insns;
2291 tree fndecl = get_callee_fndecl (exp);
2292 machine_mode mode;
2293 tree arg;
2295 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2296 return NULL_RTX;
2298 arg = CALL_EXPR_ARG (exp, 0);
2300 switch (DECL_FUNCTION_CODE (fndecl))
2302 CASE_FLT_FN (BUILT_IN_SIN):
2303 CASE_FLT_FN (BUILT_IN_COS):
2304 builtin_optab = sincos_optab; break;
2305 default:
2306 gcc_unreachable ();
2309 /* Make a suitable register to place result in. */
2310 mode = TYPE_MODE (TREE_TYPE (exp));
2312 /* Check if the sincos insn is available; otherwise fall back
2313 to the sin or cos insn. */
2314 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2315 switch (DECL_FUNCTION_CODE (fndecl))
2317 CASE_FLT_FN (BUILT_IN_SIN):
2318 builtin_optab = sin_optab; break;
2319 CASE_FLT_FN (BUILT_IN_COS):
2320 builtin_optab = cos_optab; break;
2321 default:
2322 gcc_unreachable ();
2325 /* Before working hard, check whether the instruction is available. */
2326 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2328 rtx result = gen_reg_rtx (mode);
2330 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2331 need to expand the argument again. This way, we will not perform
2332 side-effects more than once. */
2333 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2335 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2337 start_sequence ();
2339 /* Compute into RESULT.
2340 Set RESULT to wherever the result comes back. */
2341 if (builtin_optab == sincos_optab)
2343 int ok;
2345 switch (DECL_FUNCTION_CODE (fndecl))
2347 CASE_FLT_FN (BUILT_IN_SIN):
2348 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2349 break;
2350 CASE_FLT_FN (BUILT_IN_COS):
2351 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2352 break;
2353 default:
2354 gcc_unreachable ();
2356 gcc_assert (ok);
2358 else
2359 result = expand_unop (mode, builtin_optab, op0, result, 0);
2361 if (result != 0)
2363 /* Output the entire sequence. */
2364 insns = get_insns ();
2365 end_sequence ();
2366 emit_insn (insns);
2367 return result;
2370 /* If we were unable to expand via the builtin, stop the sequence
2371 (without outputting the insns) and call to the library function
2372 with the stabilized argument list. */
2373 end_sequence ();
2376 return expand_call (exp, target, target == const0_rtx);
2379 /* Given an interclass math builtin decl FNDECL and its argument ARG
2380 return an RTL instruction code that implements the functionality.
2381 If that isn't possible or available return CODE_FOR_nothing. */
2383 static enum insn_code
2384 interclass_mathfn_icode (tree arg, tree fndecl)
2386 bool errno_set = false;
2387 optab builtin_optab = unknown_optab;
2388 machine_mode mode;
2390 switch (DECL_FUNCTION_CODE (fndecl))
2392 CASE_FLT_FN (BUILT_IN_ILOGB):
2393 errno_set = true; builtin_optab = ilogb_optab; break;
2394 CASE_FLT_FN (BUILT_IN_ISINF):
2395 builtin_optab = isinf_optab; break;
2396 case BUILT_IN_ISNORMAL:
2397 case BUILT_IN_ISFINITE:
2398 CASE_FLT_FN (BUILT_IN_FINITE):
2399 case BUILT_IN_FINITED32:
2400 case BUILT_IN_FINITED64:
2401 case BUILT_IN_FINITED128:
2402 case BUILT_IN_ISINFD32:
2403 case BUILT_IN_ISINFD64:
2404 case BUILT_IN_ISINFD128:
2405 /* These builtins have no optabs (yet). */
2406 break;
2407 default:
2408 gcc_unreachable ();
2411 /* There's no easy way to detect the case we need to set EDOM. */
2412 if (flag_errno_math && errno_set)
2413 return CODE_FOR_nothing;
2415 /* Optab mode depends on the mode of the input argument. */
2416 mode = TYPE_MODE (TREE_TYPE (arg));
2418 if (builtin_optab)
2419 return optab_handler (builtin_optab, mode);
2420 return CODE_FOR_nothing;
2423 /* Expand a call to one of the builtin math functions that operate on
2424 a floating-point argument and output an integer result (ilogb, isinf,
2425 isnan, etc).
2426 Return 0 if a normal call should be emitted rather than expanding the
2427 function in-line. EXP is the expression that is a call to the builtin
2428 function; if convenient, the result should be placed in TARGET. */
2430 static rtx
2431 expand_builtin_interclass_mathfn (tree exp, rtx target)
2433 enum insn_code icode = CODE_FOR_nothing;
2434 rtx op0;
2435 tree fndecl = get_callee_fndecl (exp);
2436 machine_mode mode;
2437 tree arg;
2439 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2440 return NULL_RTX;
2442 arg = CALL_EXPR_ARG (exp, 0);
2443 icode = interclass_mathfn_icode (arg, fndecl);
2444 mode = TYPE_MODE (TREE_TYPE (arg));
2446 if (icode != CODE_FOR_nothing)
2448 struct expand_operand ops[1];
2449 rtx_insn *last = get_last_insn ();
2450 tree orig_arg = arg;
2452 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2453 need to expand the argument again. This way, we will not perform
2454 side-effects more than once. */
2455 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2457 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2459 if (mode != GET_MODE (op0))
2460 op0 = convert_to_mode (mode, op0, 0);
2462 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2463 if (maybe_legitimize_operands (icode, 0, 1, ops)
2464 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2465 return ops[0].value;
2467 delete_insns_since (last);
2468 CALL_EXPR_ARG (exp, 0) = orig_arg;
2471 return NULL_RTX;
2474 /* Expand a call to the builtin sincos math function.
2475 Return NULL_RTX if a normal call should be emitted rather than expanding the
2476 function in-line. EXP is the expression that is a call to the builtin
2477 function. */
2479 static rtx
2480 expand_builtin_sincos (tree exp)
2482 rtx op0, op1, op2, target1, target2;
2483 machine_mode mode;
2484 tree arg, sinp, cosp;
2485 int result;
2486 location_t loc = EXPR_LOCATION (exp);
2487 tree alias_type, alias_off;
2489 if (!validate_arglist (exp, REAL_TYPE,
2490 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2491 return NULL_RTX;
2493 arg = CALL_EXPR_ARG (exp, 0);
2494 sinp = CALL_EXPR_ARG (exp, 1);
2495 cosp = CALL_EXPR_ARG (exp, 2);
2497 /* Make a suitable register to place result in. */
2498 mode = TYPE_MODE (TREE_TYPE (arg));
2500 /* Check if sincos insn is available, otherwise emit the call. */
2501 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2502 return NULL_RTX;
2504 target1 = gen_reg_rtx (mode);
2505 target2 = gen_reg_rtx (mode);
2507 op0 = expand_normal (arg);
2508 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2509 alias_off = build_int_cst (alias_type, 0);
2510 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2511 sinp, alias_off));
2512 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2513 cosp, alias_off));
2515 /* Compute into target1 and target2.
2516 Set TARGET to wherever the result comes back. */
2517 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2518 gcc_assert (result);
2520 /* Move target1 and target2 to the memory locations indicated
2521 by op1 and op2. */
2522 emit_move_insn (op1, target1);
2523 emit_move_insn (op2, target2);
2525 return const0_rtx;
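/* A minimal user-level sketch of the case handled above, assuming a
   target with a sincos insn (e.g. x87 fsincos):

     double s, c;
     __builtin_sincos (x, &s, &c);  // one insn computes both results

   Without the optab the function returns NULL_RTX and a regular
   sincos library call is emitted. */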
2528 /* Expand a call to the internal cexpi builtin to the sincos math function.
2529 EXP is the expression that is a call to the builtin function; if convenient,
2530 the result should be placed in TARGET. */
2532 static rtx
2533 expand_builtin_cexpi (tree exp, rtx target)
2535 tree fndecl = get_callee_fndecl (exp);
2536 tree arg, type;
2537 machine_mode mode;
2538 rtx op0, op1, op2;
2539 location_t loc = EXPR_LOCATION (exp);
2541 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2542 return NULL_RTX;
2544 arg = CALL_EXPR_ARG (exp, 0);
2545 type = TREE_TYPE (arg);
2546 mode = TYPE_MODE (TREE_TYPE (arg));
2548 /* Try expanding via a sincos optab, and fall back to emitting a libcall
2549 to sincos or cexp. We are sure one of them is available because cexpi
2550 is only generated from sincos or cexp, or when either is available. */
2551 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2553 op1 = gen_reg_rtx (mode);
2554 op2 = gen_reg_rtx (mode);
2556 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2558 /* Compute into op1 and op2. */
2559 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2561 else if (targetm.libc_has_function (function_sincos))
2563 tree call, fn = NULL_TREE;
2564 tree top1, top2;
2565 rtx op1a, op2a;
2567 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2568 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2569 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2570 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2571 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2572 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2573 else
2574 gcc_unreachable ();
2576 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2577 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2578 op1a = copy_addr_to_reg (XEXP (op1, 0));
2579 op2a = copy_addr_to_reg (XEXP (op2, 0));
2580 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2581 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2583 /* Make sure not to fold the sincos call again. */
2584 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2585 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2586 call, 3, arg, top1, top2));
2588 else
2590 tree call, fn = NULL_TREE, narg;
2591 tree ctype = build_complex_type (type);
2593 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2594 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2595 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2596 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2597 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2598 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2599 else
2600 gcc_unreachable ();
2602 /* If we don't have a decl for cexp create one. This is the
2603 friendliest fallback if the user calls __builtin_cexpi
2604 without full target C99 function support. */
2605 if (fn == NULL_TREE)
2607 tree fntype;
2608 const char *name = NULL;
2610 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2611 name = "cexpf";
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2613 name = "cexp";
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2615 name = "cexpl";
2617 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2618 fn = build_fn_decl (name, fntype);
2621 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2622 build_real (type, dconst0), arg);
2624 /* Make sure not to fold the cexp call again. */
2625 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2626 return expand_expr (build_call_nary (ctype, call, 1, narg),
2627 target, VOIDmode, EXPAND_NORMAL);
2630 /* Now build the proper return type. */
2631 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2632 make_tree (TREE_TYPE (arg), op2),
2633 make_tree (TREE_TYPE (arg), op1)),
2634 target, VOIDmode, EXPAND_NORMAL);
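/* The three branches above lower cexpi in three ways; a sketch
   (cexpi is usually introduced by the sincos pass combining a sin/cos
   pair, though __builtin_cexpi can also be called directly):

     // 1. sincos optab present: one insn computes both parts
     // 2. libc provides sincos: sincos (x, &s, &c); result = c + s*I;
     // 3. otherwise:            result = cexp (0.0 + x*I);
*/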
2637 /* Conveniently construct a function call expression. FNDECL names the
2638 function to be called, N is the number of arguments, and the "..."
2639 parameters are the argument expressions. Unlike build_call_expr
2640 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2642 static tree
2643 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2645 va_list ap;
2646 tree fntype = TREE_TYPE (fndecl);
2647 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2649 va_start (ap, n);
2650 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2651 va_end (ap);
2652 SET_EXPR_LOCATION (fn, loc);
2653 return fn;
2656 /* Expand a call to one of the builtin rounding functions gcc defines
2657 as an extension (lfloor and lceil). As these are gcc extensions we
2658 do not need to worry about setting errno to EDOM.
2659 If expanding via optab fails, lower expression to (int)(floor(x)).
2660 EXP is the expression that is a call to the builtin function;
2661 if convenient, the result should be placed in TARGET. */
2663 static rtx
2664 expand_builtin_int_roundingfn (tree exp, rtx target)
2666 convert_optab builtin_optab;
2667 rtx op0, tmp;
2668 rtx_insn *insns;
2669 tree fndecl = get_callee_fndecl (exp);
2670 enum built_in_function fallback_fn;
2671 tree fallback_fndecl;
2672 machine_mode mode;
2673 tree arg;
2675 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2676 return NULL_RTX;
2678 arg = CALL_EXPR_ARG (exp, 0);
2680 switch (DECL_FUNCTION_CODE (fndecl))
2682 CASE_FLT_FN (BUILT_IN_ICEIL):
2683 CASE_FLT_FN (BUILT_IN_LCEIL):
2684 CASE_FLT_FN (BUILT_IN_LLCEIL):
2685 builtin_optab = lceil_optab;
2686 fallback_fn = BUILT_IN_CEIL;
2687 break;
2689 CASE_FLT_FN (BUILT_IN_IFLOOR):
2690 CASE_FLT_FN (BUILT_IN_LFLOOR):
2691 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2692 builtin_optab = lfloor_optab;
2693 fallback_fn = BUILT_IN_FLOOR;
2694 break;
2696 default:
2697 gcc_unreachable ();
2700 /* Make a suitable register to place result in. */
2701 mode = TYPE_MODE (TREE_TYPE (exp));
2703 target = gen_reg_rtx (mode);
2705 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2706 need to expand the argument again. This way, we will not perform
2707 side-effects more than once. */
2708 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2710 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2712 start_sequence ();
2714 /* Compute into TARGET. */
2715 if (expand_sfix_optab (target, op0, builtin_optab))
2717 /* Output the entire sequence. */
2718 insns = get_insns ();
2719 end_sequence ();
2720 emit_insn (insns);
2721 return target;
2724 /* If we were unable to expand via the builtin, stop the sequence
2725 (without outputting the insns). */
2726 end_sequence ();
2728 /* Fall back to floating point rounding optab. */
2729 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2731 /* For non-C99 targets we may end up without a fallback fndecl here
2732 if the user called __builtin_lfloor directly. In this case emit
2733 a call to the floor/ceil variants nevertheless. This should give
2734 the best user experience on targets without full C99 support. */
2735 if (fallback_fndecl == NULL_TREE)
2737 tree fntype;
2738 const char *name = NULL;
2740 switch (DECL_FUNCTION_CODE (fndecl))
2742 case BUILT_IN_ICEIL:
2743 case BUILT_IN_LCEIL:
2744 case BUILT_IN_LLCEIL:
2745 name = "ceil";
2746 break;
2747 case BUILT_IN_ICEILF:
2748 case BUILT_IN_LCEILF:
2749 case BUILT_IN_LLCEILF:
2750 name = "ceilf";
2751 break;
2752 case BUILT_IN_ICEILL:
2753 case BUILT_IN_LCEILL:
2754 case BUILT_IN_LLCEILL:
2755 name = "ceill";
2756 break;
2757 case BUILT_IN_IFLOOR:
2758 case BUILT_IN_LFLOOR:
2759 case BUILT_IN_LLFLOOR:
2760 name = "floor";
2761 break;
2762 case BUILT_IN_IFLOORF:
2763 case BUILT_IN_LFLOORF:
2764 case BUILT_IN_LLFLOORF:
2765 name = "floorf";
2766 break;
2767 case BUILT_IN_IFLOORL:
2768 case BUILT_IN_LFLOORL:
2769 case BUILT_IN_LLFLOORL:
2770 name = "floorl";
2771 break;
2772 default:
2773 gcc_unreachable ();
2776 fntype = build_function_type_list (TREE_TYPE (arg),
2777 TREE_TYPE (arg), NULL_TREE);
2778 fallback_fndecl = build_fn_decl (name, fntype);
2781 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2783 tmp = expand_normal (exp);
2784 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2786 /* Truncate the result of floating point optab to integer
2787 via expand_fix (). */
2788 target = gen_reg_rtx (mode);
2789 expand_fix (target, tmp, 0);
2791 return target;
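/* The user-visible effect of the two paths above, as a sketch:

     long l = __builtin_lfloor (x);
     // with lfloor_optab:  a single conversion insn
     // without it:         equivalent to l = (long) floor (x);

   Because lfloor/lceil are GCC extensions, neither form needs to set
   errno. */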
2794 /* Expand a call to one of the builtin math functions doing integer
2795 conversion (lrint).
2796 Return 0 if a normal call should be emitted rather than expanding the
2797 function in-line. EXP is the expression that is a call to the builtin
2798 function; if convenient, the result should be placed in TARGET. */
2800 static rtx
2801 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2803 convert_optab builtin_optab;
2804 rtx op0;
2805 rtx_insn *insns;
2806 tree fndecl = get_callee_fndecl (exp);
2807 tree arg;
2808 machine_mode mode;
2809 enum built_in_function fallback_fn = BUILT_IN_NONE;
2811 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2812 return NULL_RTX;
2814 arg = CALL_EXPR_ARG (exp, 0);
2816 switch (DECL_FUNCTION_CODE (fndecl))
2818 CASE_FLT_FN (BUILT_IN_IRINT):
2819 fallback_fn = BUILT_IN_LRINT;
2820 gcc_fallthrough ();
2821 CASE_FLT_FN (BUILT_IN_LRINT):
2822 CASE_FLT_FN (BUILT_IN_LLRINT):
2823 builtin_optab = lrint_optab;
2824 break;
2826 CASE_FLT_FN (BUILT_IN_IROUND):
2827 fallback_fn = BUILT_IN_LROUND;
2828 gcc_fallthrough ();
2829 CASE_FLT_FN (BUILT_IN_LROUND):
2830 CASE_FLT_FN (BUILT_IN_LLROUND):
2831 builtin_optab = lround_optab;
2832 break;
2834 default:
2835 gcc_unreachable ();
2838 /* There's no easy way to detect the case we need to set EDOM. */
2839 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2840 return NULL_RTX;
2842 /* Make a suitable register to place result in. */
2843 mode = TYPE_MODE (TREE_TYPE (exp));
2845 /* Expand inline only when errno does not need to be set. */
2846 if (!flag_errno_math)
2848 rtx result = gen_reg_rtx (mode);
2850 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2851 need to expand the argument again. This way, we will not perform
2852 side-effects more than once. */
2853 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2855 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2857 start_sequence ();
2859 if (expand_sfix_optab (result, op0, builtin_optab))
2861 /* Output the entire sequence. */
2862 insns = get_insns ();
2863 end_sequence ();
2864 emit_insn (insns);
2865 return result;
2868 /* If we were unable to expand via the builtin, stop the sequence
2869 (without outputting the insns) and call to the library function
2870 with the stabilized argument list. */
2871 end_sequence ();
2874 if (fallback_fn != BUILT_IN_NONE)
2876 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2877 targets, (int) round (x) should never be transformed into
2878 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2879 a call to lround in the hope that the target provides at least some
2880 C99 functions. This should give the best user experience on
2881 targets without full C99 support. */
2882 tree fallback_fndecl = mathfn_built_in_1
2883 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2885 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2886 fallback_fndecl, 1, arg);
2888 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2889 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2890 return convert_to_mode (mode, target, 0);
2893 return expand_call (exp, target, target == const0_rtx);
2896 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2897 a normal call should be emitted rather than expanding the function
2898 in-line. EXP is the expression that is a call to the builtin
2899 function; if convenient, the result should be placed in TARGET. */
2901 static rtx
2902 expand_builtin_powi (tree exp, rtx target)
2904 tree arg0, arg1;
2905 rtx op0, op1;
2906 machine_mode mode;
2907 machine_mode mode2;
2909 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2910 return NULL_RTX;
2912 arg0 = CALL_EXPR_ARG (exp, 0);
2913 arg1 = CALL_EXPR_ARG (exp, 1);
2914 mode = TYPE_MODE (TREE_TYPE (exp));
2916 /* Emit a libcall to libgcc. */
2918 /* Mode of the 2nd argument must match that of an int. */
2919 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2921 if (target == NULL_RTX)
2922 target = gen_reg_rtx (mode);
2924 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2925 if (GET_MODE (op0) != mode)
2926 op0 = convert_to_mode (mode, op0, 0);
2927 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2928 if (GET_MODE (op1) != mode2)
2929 op1 = convert_to_mode (mode2, op1, 0);
2931 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2932 target, LCT_CONST, mode,
2933 op0, mode, op1, mode2);
2935 return target;
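/* A user-level sketch of the libcall emitted above; __powidf2 is
   libgcc's powi helper for DFmode:

     double y = __builtin_powi (x, 5);   // lowers to __powidf2 (x, 5)

   Unlike pow, the exponent is a plain int and the call is emitted as
   LCT_CONST, so no errno handling is involved. */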
2938 /* Expand expression EXP which is a call to the strlen builtin. Return
2939 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2940 try to get the result in TARGET, if convenient. */
2942 static rtx
2943 expand_builtin_strlen (tree exp, rtx target,
2944 machine_mode target_mode)
2946 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2947 return NULL_RTX;
2949 struct expand_operand ops[4];
2950 rtx pat;
2951 tree len;
2952 tree src = CALL_EXPR_ARG (exp, 0);
2953 rtx src_reg;
2954 rtx_insn *before_strlen;
2955 machine_mode insn_mode;
2956 enum insn_code icode = CODE_FOR_nothing;
2957 unsigned int align;
2959 /* If the length can be computed at compile-time, return it. */
2960 len = c_strlen (src, 0);
2961 if (len)
2962 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2964 /* If the length can be computed at compile-time and is a constant
2965 integer, but there are side-effects in src, evaluate
2966 src for side-effects, then return len.
2967 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2968 can be optimized into: i++; x = 3; */
2969 len = c_strlen (src, 1);
2970 if (len && TREE_CODE (len) == INTEGER_CST)
2972 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2973 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2976 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2978 /* If SRC is not a pointer type, don't do this operation inline. */
2979 if (align == 0)
2980 return NULL_RTX;
2982 /* Bail out if we can't compute strlen in the right mode. */
2983 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2985 icode = optab_handler (strlen_optab, insn_mode);
2986 if (icode != CODE_FOR_nothing)
2987 break;
2989 if (insn_mode == VOIDmode)
2990 return NULL_RTX;
2992 /* Make a place to hold the source address. We will not expand
2993 the actual source until we are sure that the expansion will
2994 not fail -- there are trees that cannot be expanded twice. */
2995 src_reg = gen_reg_rtx (Pmode);
2997 /* Mark the beginning of the strlen sequence so we can emit the
2998 source operand later. */
2999 before_strlen = get_last_insn ();
3001 create_output_operand (&ops[0], target, insn_mode);
3002 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3003 create_integer_operand (&ops[2], 0);
3004 create_integer_operand (&ops[3], align);
3005 if (!maybe_expand_insn (icode, 4, ops))
3006 return NULL_RTX;
3008 /* Check to see if the argument was declared attribute nonstring
3009 and if so, issue a warning since at this point it's not known
3010 to be nul-terminated. */
3011 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3013 /* Now that we are assured of success, expand the source. */
3014 start_sequence ();
3015 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3016 if (pat != src_reg)
3018 #ifdef POINTERS_EXTEND_UNSIGNED
3019 if (GET_MODE (pat) != Pmode)
3020 pat = convert_to_mode (Pmode, pat,
3021 POINTERS_EXTEND_UNSIGNED);
3022 #endif
3023 emit_move_insn (src_reg, pat);
3025 pat = get_insns ();
3026 end_sequence ();
3028 if (before_strlen)
3029 emit_insn_after (pat, before_strlen);
3030 else
3031 emit_insn_before (pat, get_insns ());
3033 /* Return the value in the proper mode for this function. */
3034 if (GET_MODE (ops[0].value) == target_mode)
3035 target = ops[0].value;
3036 else if (target != 0)
3037 convert_move (target, ops[0].value, 0);
3038 else
3039 target = convert_to_mode (target_mode, ops[0].value, 0);
3041 return target;
3044 /* Expand call EXP to the strnlen built-in, returning the result
3045 in TARGET if convenient, or NULL_RTX on failure. */
3047 static rtx
3048 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3050 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3051 return NULL_RTX;
3053 tree src = CALL_EXPR_ARG (exp, 0);
3054 tree bound = CALL_EXPR_ARG (exp, 1);
3056 if (!bound)
3057 return NULL_RTX;
3059 location_t loc = UNKNOWN_LOCATION;
3060 if (EXPR_HAS_LOCATION (exp))
3061 loc = EXPR_LOCATION (exp);
3063 tree maxobjsize = max_object_size ();
3064 tree func = get_callee_fndecl (exp);
3066 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3067 so these conversions aren't necessary. */
3068 c_strlen_data lendata = { };
3069 tree len = c_strlen (src, 0, &lendata, 1);
3070 if (len)
3071 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3073 if (TREE_CODE (bound) == INTEGER_CST)
3075 if (!TREE_NO_WARNING (exp)
3076 && tree_int_cst_lt (maxobjsize, bound)
3077 && warning_at (loc, OPT_Wstringop_overflow_,
3078 "%K%qD specified bound %E "
3079 "exceeds maximum object size %E",
3080 exp, func, bound, maxobjsize))
3081 TREE_NO_WARNING (exp) = true;
3083 bool exact = true;
3084 if (!len || TREE_CODE (len) != INTEGER_CST)
3086 /* Clear EXACT if LEN may be less than SRC suggests,
3087 such as in
3088 strnlen (&a[i], sizeof a)
3089 where the value of i is unknown. Unless i's value is
3090 zero, the call is unsafe because the bound is greater. */
3091 lendata.decl = unterminated_array (src, &len, &exact);
3092 if (!lendata.decl)
3093 return NULL_RTX;
3096 if (lendata.decl
3097 && !TREE_NO_WARNING (exp)
3098 && ((tree_int_cst_lt (len, bound))
3099 || !exact))
3101 location_t warnloc
3102 = expansion_point_location_if_in_system_header (loc);
3104 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3105 exact
3106 ? G_("%K%qD specified bound %E exceeds the size %E "
3107 "of unterminated array")
3108 : G_("%K%qD specified bound %E may exceed the size "
3109 "of at most %E of unterminated array"),
3110 exp, func, bound, len))
3112 inform (DECL_SOURCE_LOCATION (lendata.decl),
3113 "referenced argument declared here");
3114 TREE_NO_WARNING (exp) = true;
3115 return NULL_RTX;
3119 if (!len)
3120 return NULL_RTX;
3122 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3123 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3126 if (TREE_CODE (bound) != SSA_NAME)
3127 return NULL_RTX;
3129 wide_int min, max;
3130 enum value_range_kind rng = get_range_info (bound, &min, &max);
3131 if (rng != VR_RANGE)
3132 return NULL_RTX;
3134 if (!TREE_NO_WARNING (exp)
3135 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3136 && warning_at (loc, OPT_Wstringop_overflow_,
3137 "%K%qD specified bound [%wu, %wu] "
3138 "exceeds maximum object size %E",
3139 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3140 TREE_NO_WARNING (exp) = true;
3142 bool exact = true;
3143 if (!len || TREE_CODE (len) != INTEGER_CST)
3145 lendata.decl = unterminated_array (src, &len, &exact);
3146 if (!lendata.decl)
3147 return NULL_RTX;
3150 if (lendata.decl
3151 && !TREE_NO_WARNING (exp)
3152 && (wi::ltu_p (wi::to_wide (len), min)
3153 || !exact))
3155 location_t warnloc
3156 = expansion_point_location_if_in_system_header (loc);
3158 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3159 exact
3160 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3161 "the size %E of unterminated array")
3162 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3163 "the size of at most %E of unterminated array"),
3164 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3166 inform (DECL_SOURCE_LOCATION (lendata.decl),
3167 "referenced argument declared here");
3168 TREE_NO_WARNING (exp) = true;
3172 if (lendata.decl)
3173 return NULL_RTX;
3175 if (wi::gtu_p (min, wi::to_wide (len)))
3176 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3178 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3179 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
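/* A sketch of the diagnostic the checks above are after; the array
   and bound are illustrative:

     char a[4] = "abcd";          // no terminating nul
     size_t n = strnlen (a, 8);   // bound 8 exceeds the size 4 of the
                                  // unterminated array -> warning

   When no diagnostic applies and the length is known, the call is
   folded to MIN (len, bound) and expanded inline. */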
3182 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3183 bytes from constant string DATA + OFFSET and return it as a target
3184 constant. */
3186 static rtx
3187 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3188 scalar_int_mode mode)
3190 const char *str = (const char *) data;
3192 gcc_assert (offset >= 0
3193 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3194 <= strlen (str) + 1));
3196 return c_readstr (str + offset, mode);
3199 /* LEN specifies the length of the block of a memcpy/memset operation.
3200 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3201 In some cases we can make a very likely guess at the maximum size,
3202 which we then store in PROBABLE_MAX_SIZE. */
3204 static void
3205 determine_block_size (tree len, rtx len_rtx,
3206 unsigned HOST_WIDE_INT *min_size,
3207 unsigned HOST_WIDE_INT *max_size,
3208 unsigned HOST_WIDE_INT *probable_max_size)
3210 if (CONST_INT_P (len_rtx))
3212 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3213 return;
3215 else
3217 wide_int min, max;
3218 enum value_range_kind range_type = VR_UNDEFINED;
3220 /* Determine bounds from the type. */
3221 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3222 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3223 else
3224 *min_size = 0;
3225 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3226 *probable_max_size = *max_size
3227 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3228 else
3229 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3231 if (TREE_CODE (len) == SSA_NAME)
3232 range_type = get_range_info (len, &min, &max);
3233 if (range_type == VR_RANGE)
3235 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3236 *min_size = min.to_uhwi ();
3237 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3238 *probable_max_size = *max_size = max.to_uhwi ();
3240 else if (range_type == VR_ANTI_RANGE)
3242 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3243 if (min == 0)
3245 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3246 *min_size = max.to_uhwi () + 1;
3248 /* Code like
3250 int n;
3251 if (n < 100)
3252 memcpy (a, b, n)
3254 produces an anti-range allowing negative values of N. We can
3255 still use that information to guess that N is not negative. */
3257 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3258 *probable_max_size = min.to_uhwi () - 1;
3261 gcc_checking_assert (*max_size <=
3262 (unsigned HOST_WIDE_INT)
3263 GET_MODE_MASK (GET_MODE (len_rtx)));
3266 /* Try to verify that the sizes and lengths of the arguments to a string
3267 manipulation function given by EXP are within valid bounds and that
3268 the operation does not lead to buffer overflow or read past the end.
3269 Arguments other than EXP may be null. When non-null, the arguments
3270 have the following meaning:
3271 DST is the destination of a copy call or NULL otherwise.
3272 SRC is the source of a copy call or NULL otherwise.
3273 DSTWRITE is the number of bytes written into the destination obtained
3274 from the user-supplied size argument to the function (such as in
3275 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3276 MAXREAD is the user-supplied bound on the length of the source sequence
3277 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3278 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3279 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3280 expression EXP is a string function call (as opposed to a memory call
3281 like memcpy). As an exception, SRCSTR can also be an integer denoting
3282 the precomputed size of the source string or object (for functions like
3283 memcpy).
3284 DSTSIZE is the size of the destination object specified by the last
3285 argument to the _chk builtins, typically resulting from the expansion
3286 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3287 DSTSIZE)).
3289 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3290 SIZE_MAX.
3292 If the call is successfully verified as safe return true, otherwise
3293 return false. */
3295 static bool
3296 check_access (tree exp, tree, tree, tree dstwrite,
3297 tree maxread, tree srcstr, tree dstsize)
3299 int opt = OPT_Wstringop_overflow_;
3301 /* The size of the largest object is half the address space, or
3302 PTRDIFF_MAX. (This is way too permissive.) */
3303 tree maxobjsize = max_object_size ();
3305 /* Either the length of the source string for string functions or
3306 the size of the source object for raw memory functions. */
3307 tree slen = NULL_TREE;
3309 tree range[2] = { NULL_TREE, NULL_TREE };
3311 /* Set to true when the exact number of bytes written by a string
3312 function like strcpy is not known and the only thing that is
3313 known is that it must be at least one (for the terminating nul). */
3314 bool at_least_one = false;
3315 if (srcstr)
3317 /* SRCSTR is normally a pointer to string but as a special case
3318 it can be an integer denoting the length of a string. */
3319 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3321 /* Try to determine the range of lengths the source string
3322 refers to. If it can be determined and is less than
3323 the upper bound given by MAXREAD add one to it for
3324 the terminating nul. Otherwise, set it to one for
3325 the same reason, or to MAXREAD as appropriate. */
3326 c_strlen_data lendata = { };
3327 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3328 range[0] = lendata.minlen;
3329 range[1] = lendata.maxbound;
3330 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3332 if (maxread && tree_int_cst_le (maxread, range[0]))
3333 range[0] = range[1] = maxread;
3334 else
3335 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3336 range[0], size_one_node);
3338 if (maxread && tree_int_cst_le (maxread, range[1]))
3339 range[1] = maxread;
3340 else if (!integer_all_onesp (range[1]))
3341 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3342 range[1], size_one_node);
3344 slen = range[0];
3346 else
3348 at_least_one = true;
3349 slen = size_one_node;
3352 else
3353 slen = srcstr;
3356 if (!dstwrite && !maxread)
3358 /* When the only available piece of data is the object size
3359 there is nothing to do. */
3360 if (!slen)
3361 return true;
3363 /* Otherwise, when the length of the source sequence is known
3364 (as with strlen), set DSTWRITE to it. */
3365 if (!range[0])
3366 dstwrite = slen;
3369 if (!dstsize)
3370 dstsize = maxobjsize;
3372 if (dstwrite)
3373 get_size_range (dstwrite, range);
3375 tree func = get_callee_fndecl (exp);
3377 /* First check the number of bytes to be written against the maximum
3378 object size. */
3379 if (range[0]
3380 && TREE_CODE (range[0]) == INTEGER_CST
3381 && tree_int_cst_lt (maxobjsize, range[0]))
3383 if (TREE_NO_WARNING (exp))
3384 return false;
3386 location_t loc = tree_nonartificial_location (exp);
3387 loc = expansion_point_location_if_in_system_header (loc);
3389 bool warned;
3390 if (range[0] == range[1])
3391 warned = warning_at (loc, opt,
3392 "%K%qD specified size %E "
3393 "exceeds maximum object size %E",
3394 exp, func, range[0], maxobjsize);
3395 else
3396 warned = warning_at (loc, opt,
3397 "%K%qD specified size between %E and %E "
3398 "exceeds maximum object size %E",
3399 exp, func,
3400 range[0], range[1], maxobjsize);
3401 if (warned)
3402 TREE_NO_WARNING (exp) = true;
3404 return false;
3407 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3408 constant, and in range of unsigned HOST_WIDE_INT. */
3409 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3411 /* Next check the number of bytes to be written against the destination
3412 object size. */
3413 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3415 if (range[0]
3416 && TREE_CODE (range[0]) == INTEGER_CST
3417 && ((tree_fits_uhwi_p (dstsize)
3418 && tree_int_cst_lt (dstsize, range[0]))
3419 || (dstwrite
3420 && tree_fits_uhwi_p (dstwrite)
3421 && tree_int_cst_lt (dstwrite, range[0]))))
3423 if (TREE_NO_WARNING (exp))
3424 return false;
3426 location_t loc = tree_nonartificial_location (exp);
3427 loc = expansion_point_location_if_in_system_header (loc);
3429 if (dstwrite == slen && at_least_one)
3431 /* This is a call to strcpy with a destination of 0 size
3432 and a source of unknown length. The call will write
3433 at least one byte past the end of the destination. */
3434 warning_at (loc, opt,
3435 "%K%qD writing %E or more bytes into a region "
3436 "of size %E overflows the destination",
3437 exp, func, range[0], dstsize);
3439 else if (tree_int_cst_equal (range[0], range[1]))
3440 warning_n (loc, opt, tree_to_uhwi (range[0]),
3441 "%K%qD writing %E byte into a region "
3442 "of size %E overflows the destination",
3443 "%K%qD writing %E bytes into a region "
3444 "of size %E overflows the destination",
3445 exp, func, range[0], dstsize);
3446 else if (tree_int_cst_sign_bit (range[1]))
3448 /* Avoid printing the upper bound if it's invalid. */
3449 warning_at (loc, opt,
3450 "%K%qD writing %E or more bytes into a region "
3451 "of size %E overflows the destination",
3452 exp, func, range[0], dstsize);
3454 else
3455 warning_at (loc, opt,
3456 "%K%qD writing between %E and %E bytes into "
3457 "a region of size %E overflows the destination",
3458 exp, func, range[0], range[1],
3459 dstsize);
3461 /* Return error when an overflow has been detected. */
3462 return false;
3466 /* Check the maximum length of the source sequence against the size
3467 of the destination object if known, or against the maximum size
3468 of an object. */
3469 if (maxread)
3471 get_size_range (maxread, range);
3473 /* Use the lower end for MAXREAD from now on. */
3474 if (range[0])
3475 maxread = range[0];
3477 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3479 location_t loc = tree_nonartificial_location (exp);
3480 loc = expansion_point_location_if_in_system_header (loc);
3482 if (tree_int_cst_lt (maxobjsize, range[0]))
3484 if (TREE_NO_WARNING (exp))
3485 return false;
3487 /* Warn about crazy big sizes first since that's more
3488 likely to be meaningful than saying that the bound
3489 is greater than the object size if both are big. */
3490 if (range[0] == range[1])
3491 warning_at (loc, opt,
3492 "%K%qD specified bound %E "
3493 "exceeds maximum object size %E",
3494 exp, func,
3495 range[0], maxobjsize);
3496 else
3497 warning_at (loc, opt,
3498 "%K%qD specified bound between %E and %E "
3499 "exceeds maximum object size %E",
3500 exp, func,
3501 range[0], range[1], maxobjsize);
3503 return false;
3506 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3508 if (TREE_NO_WARNING (exp))
3509 return false;
3511 if (tree_int_cst_equal (range[0], range[1]))
3512 warning_at (loc, opt,
3513 "%K%qD specified bound %E "
3514 "exceeds destination size %E",
3515 exp, func,
3516 range[0], dstsize);
3517 else
3518 warning_at (loc, opt,
3519 "%K%qD specified bound between %E and %E "
3520 "exceeds destination size %E",
3521 exp, func,
3522 range[0], range[1], dstsize);
3523 return false;
3528 /* Check for reading past the end of SRC. */
3529 if (slen
3530 && slen == srcstr
3531 && dstwrite && range[0]
3532 && tree_int_cst_lt (slen, range[0]))
3534 if (TREE_NO_WARNING (exp))
3535 return false;
3537 location_t loc = tree_nonartificial_location (exp);
3539 if (tree_int_cst_equal (range[0], range[1]))
3540 warning_n (loc, opt, tree_to_uhwi (range[0]),
3541 "%K%qD reading %E byte from a region of size %E",
3542 "%K%qD reading %E bytes from a region of size %E",
3543 exp, func, range[0], slen);
3544 else if (tree_int_cst_sign_bit (range[1]))
3546 /* Avoid printing the upper bound if it's invalid. */
3547 warning_at (loc, opt,
3548 "%K%qD reading %E or more bytes from a region "
3549 "of size %E",
3550 exp, func, range[0], slen);
3552 else
3553 warning_at (loc, opt,
3554 "%K%qD reading between %E and %E bytes from a region "
3555 "of size %E",
3556 exp, func, range[0], range[1], slen);
3557 return false;
3560 return true;
3563 /* Helper to compute the size of the object referenced by the DEST
3564 expression which must have pointer type, using Object Size type
3565 OSTYPE (only the least significant 2 bits are used). Return
3566 an estimate of the size of the object if successful or NULL when
3567 the size cannot be determined. When the referenced object involves
3568 a non-constant offset in some range, the returned value represents
3569 the largest size given the smallest non-negative offset in the
3570 range. The function is intended for diagnostics and should not
3571 be used to influence code generation or optimization. */
3573 tree
3574 compute_objsize (tree dest, int ostype)
3576 unsigned HOST_WIDE_INT size;
3578 /* Only the two least significant bits are meaningful. */
3579 ostype &= 3;
3581 if (compute_builtin_object_size (dest, ostype, &size))
3582 return build_int_cst (sizetype, size);
3584 if (TREE_CODE (dest) == SSA_NAME)
3586 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3587 if (!is_gimple_assign (stmt))
3588 return NULL_TREE;
3590 dest = gimple_assign_rhs1 (stmt);
3592 tree_code code = gimple_assign_rhs_code (stmt);
3593 if (code == POINTER_PLUS_EXPR)
3595 /* compute_builtin_object_size fails for addresses with
3596 non-constant offsets. Try to determine the range of
3597 such an offset here and use it to adjust the constant
3598 size. */
3599 tree off = gimple_assign_rhs2 (stmt);
3600 if (TREE_CODE (off) == INTEGER_CST)
3602 if (tree size = compute_objsize (dest, ostype))
3604 wide_int wioff = wi::to_wide (off);
3605 wide_int wisiz = wi::to_wide (size);
3607 /* Ignore negative offsets for now. For others,
3608 use the lower bound as the most optimistic
3609 estimate of the (remaining) size. */
3610 if (wi::sign_mask (wioff))
3612 else if (wi::ltu_p (wioff, wisiz))
3613 return wide_int_to_tree (TREE_TYPE (size),
3614 wi::sub (wisiz, wioff));
3615 else
3616 return size_zero_node;
3619 else if (TREE_CODE (off) == SSA_NAME
3620 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3622 wide_int min, max;
3623 enum value_range_kind rng = get_range_info (off, &min, &max);
3625 if (rng == VR_RANGE)
3627 if (tree size = compute_objsize (dest, ostype))
3629 wide_int wisiz = wi::to_wide (size);
3631 /* Ignore negative offsets for now. For others,
3632 use the lower bound as the most optimistic
3633 estimate of the (remaining) size. */
3634 if (wi::sign_mask (min)
3635 || wi::sign_mask (max))
3637 else if (wi::ltu_p (min, wisiz))
3638 return wide_int_to_tree (TREE_TYPE (size),
3639 wi::sub (wisiz, min));
3640 else
3641 return size_zero_node;
3646 else if (code != ADDR_EXPR)
3647 return NULL_TREE;
3650 /* Unless computing the largest size (for memcpy and other raw memory
3651 functions), try to determine the size of the object from its type. */
3652 if (!ostype)
3653 return NULL_TREE;
3655 if (TREE_CODE (dest) != ADDR_EXPR)
3656 return NULL_TREE;
3658 tree type = TREE_TYPE (dest);
3659 if (TREE_CODE (type) == POINTER_TYPE)
3660 type = TREE_TYPE (type);
3662 type = TYPE_MAIN_VARIANT (type);
3664 if (TREE_CODE (type) == ARRAY_TYPE
3665 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3667 /* Return the constant size unless it's zero (that's a zero-length
3668 array likely at the end of a struct). */
3669 tree size = TYPE_SIZE_UNIT (type);
3670 if (size && TREE_CODE (size) == INTEGER_CST
3671 && !integer_zerop (size))
3672 return size;
3675 return NULL_TREE;
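/* Illustrative note (an addition, not part of the original source): given
   char buf[8], compute_objsize (&buf[2], 1) yields 6 via
   compute_builtin_object_size.  When the offset is instead an SSA_NAME
   with a known range of [2, 5], the POINTER_PLUS_EXPR handling above
   uses the lower bound 2 and likewise returns 6, the most optimistic
   estimate of the remaining size; an offset known to be 8 or more
   yields size_zero_node.  */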
3678 /* Helper to determine and check the sizes of the source and the destination
3679 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3680 call expression, DEST is the destination argument, SRC is the source
3681 argument or null, and LEN is the number of bytes. Use Object Size type-0
3682 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3683 (no overflow or invalid sizes), false otherwise. */
3685 static bool
3686 check_memop_access (tree exp, tree dest, tree src, tree size)
3688 /* For functions like memset and memcpy that operate on raw memory
3689 try to determine the size of the largest source and destination
3690 object using type-0 Object Size regardless of the object size
3691 type specified by the option. */
3692 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3693 tree dstsize = compute_objsize (dest, 0);
3695 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3696 srcsize, dstsize);
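/* Illustrative note (an addition, not part of the original source): for
   struct S { char a[4]; char b[4]; } s, type-0 Object Size for &s.a is 8
   (the whole enclosing object) whereas type-1 would be 4 (the member
   alone).  Using type 0 here means a call such as memcpy (s.a, src, 6)
   is not diagnosed even though it crosses into s.b, matching the
   raw-memory semantics of these built-ins.  */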
3699 /* Validate memchr arguments without performing any expansion.
3700 Return NULL_RTX. */
3702 static rtx
3703 expand_builtin_memchr (tree exp, rtx)
3705 if (!validate_arglist (exp,
3706 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3707 return NULL_RTX;
3709 tree arg1 = CALL_EXPR_ARG (exp, 0);
3710 tree len = CALL_EXPR_ARG (exp, 2);
3712 /* Diagnose calls where the specified length exceeds the size
3713 of the object. */
3714 if (warn_stringop_overflow)
3716 tree size = compute_objsize (arg1, 0);
3717 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3718 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3721 return NULL_RTX;
3724 /* Expand a call EXP to the memcpy builtin.
3725 Return NULL_RTX if we failed; the caller should emit a normal call,
3726 otherwise try to get the result in TARGET, if convenient (and in
3727 mode MODE if that's convenient). */
3729 static rtx
3730 expand_builtin_memcpy (tree exp, rtx target)
3732 if (!validate_arglist (exp,
3733 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3734 return NULL_RTX;
3736 tree dest = CALL_EXPR_ARG (exp, 0);
3737 tree src = CALL_EXPR_ARG (exp, 1);
3738 tree len = CALL_EXPR_ARG (exp, 2);
3740 check_memop_access (exp, dest, src, len);
3742 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3743 /*retmode=*/ RETURN_BEGIN);
3746 /* Check a call EXP to the memmove built-in for validity.
3747 Return NULL_RTX on both success and failure. */
3749 static rtx
3750 expand_builtin_memmove (tree exp, rtx)
3752 if (!validate_arglist (exp,
3753 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3754 return NULL_RTX;
3756 tree dest = CALL_EXPR_ARG (exp, 0);
3757 tree src = CALL_EXPR_ARG (exp, 1);
3758 tree len = CALL_EXPR_ARG (exp, 2);
3760 check_memop_access (exp, dest, src, len);
3762 return NULL_RTX;
3765 /* Expand a call EXP to the mempcpy builtin.
3766 Return NULL_RTX if we failed; the caller should emit a normal call,
3767 otherwise try to get the result in TARGET, if convenient (and in
3768 mode MODE if that's convenient). */
3770 static rtx
3771 expand_builtin_mempcpy (tree exp, rtx target)
3773 if (!validate_arglist (exp,
3774 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3775 return NULL_RTX;
3777 tree dest = CALL_EXPR_ARG (exp, 0);
3778 tree src = CALL_EXPR_ARG (exp, 1);
3779 tree len = CALL_EXPR_ARG (exp, 2);
3781 /* Policy does not generally allow using compute_objsize (which
3782 is used internally by check_memop_access) to change code generation
3783 or drive optimization decisions.
3785 In this instance it is safe because the code we generate has
3786 the same semantics regardless of the return value of
3787 check_memop_access. Exactly the same amount of data is copied
3788 and the return value is exactly the same in both cases.
3790 Furthermore, check_memop_access always uses mode 0 for the call to
3791 compute_objsize, so the imprecise nature of compute_objsize is
3792 avoided. */
3794 /* Avoid expanding mempcpy into memcpy when the call is determined
3795 to overflow the buffer. This also prevents the same overflow
3796 from being diagnosed again when expanding memcpy. */
3797 if (!check_memop_access (exp, dest, src, len))
3798 return NULL_RTX;
3800 return expand_builtin_mempcpy_args (dest, src, len,
3801 target, exp, /*retmode=*/ RETURN_END);
3804 /* Helper function to do the actual work for expand of memory copy family
3805 functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3806 of memory from SRC to DEST and assign to TARGET if convenient. Return
3807 value is based on RETMODE argument. */
3809 static rtx
3810 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3811 rtx target, tree exp, memop_ret retmode)
3813 const char *src_str;
3814 unsigned int src_align = get_pointer_alignment (src);
3815 unsigned int dest_align = get_pointer_alignment (dest);
3816 rtx dest_mem, src_mem, dest_addr, len_rtx;
3817 HOST_WIDE_INT expected_size = -1;
3818 unsigned int expected_align = 0;
3819 unsigned HOST_WIDE_INT min_size;
3820 unsigned HOST_WIDE_INT max_size;
3821 unsigned HOST_WIDE_INT probable_max_size;
3823 bool is_move_done;
3825 /* If DEST is not a pointer type, call the normal function. */
3826 if (dest_align == 0)
3827 return NULL_RTX;
3829 /* If SRC is not a pointer type, don't do this
3830 operation in-line. */
3831 if (src_align == 0)
3832 return NULL_RTX;
3834 if (currently_expanding_gimple_stmt)
3835 stringop_block_profile (currently_expanding_gimple_stmt,
3836 &expected_align, &expected_size);
3838 if (expected_align < dest_align)
3839 expected_align = dest_align;
3840 dest_mem = get_memory_rtx (dest, len);
3841 set_mem_align (dest_mem, dest_align);
3842 len_rtx = expand_normal (len);
3843 determine_block_size (len, len_rtx, &min_size, &max_size,
3844 &probable_max_size);
3845 src_str = c_getstr (src);
3847 /* If SRC is a string constant and block move would be done
3848 by pieces, we can avoid loading the string from memory
3849 and only store the computed constants. */
3850 if (src_str
3851 && CONST_INT_P (len_rtx)
3852 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3853 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3854 CONST_CAST (char *, src_str),
3855 dest_align, false))
3857 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3858 builtin_memcpy_read_str,
3859 CONST_CAST (char *, src_str),
3860 dest_align, false, retmode);
3861 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3862 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3863 return dest_mem;
3866 src_mem = get_memory_rtx (src, len);
3867 set_mem_align (src_mem, src_align);
3869 /* Copy word part most expediently. */
3870 enum block_op_methods method = BLOCK_OP_NORMAL;
3871 if (CALL_EXPR_TAILCALL (exp)
3872 && (retmode == RETURN_BEGIN || target == const0_rtx))
3873 method = BLOCK_OP_TAILCALL;
3874 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3875 && retmode == RETURN_END
3876 && target != const0_rtx);
3877 if (use_mempcpy_call)
3878 method = BLOCK_OP_NO_LIBCALL_RET;
3879 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3880 expected_align, expected_size,
3881 min_size, max_size, probable_max_size,
3882 use_mempcpy_call, &is_move_done);
3884 /* Bail out when a mempcpy call would be expanded as a libcall and
3885 the target provides a fast implementation
3886 of the mempcpy routine. */
3887 if (!is_move_done)
3888 return NULL_RTX;
3890 if (dest_addr == pc_rtx)
3891 return NULL_RTX;
3893 if (dest_addr == 0)
3895 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3896 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3899 if (retmode != RETURN_BEGIN && target != const0_rtx)
3901 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3902 /* stpcpy returns a pointer to the last byte. */
3903 if (retmode == RETURN_END_MINUS_ONE)
3904 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3907 return dest_addr;
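/* Illustrative note (an addition, not part of the original source): the
   RETMODE values used by the callers map to the familiar return values:
   RETURN_BEGIN -> DEST (memcpy), RETURN_END -> DEST + LEN (mempcpy),
   and RETURN_END_MINUS_ONE -> DEST + LEN - 1 (stpcpy, a pointer to the
   terminating nul), which is exactly the adjustment made just above.  */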
3910 static rtx
3911 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3912 rtx target, tree orig_exp, memop_ret retmode)
3914 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3915 retmode);
3918 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3919 we failed; the caller should emit a normal call, otherwise try to
3920 get the result in TARGET, if convenient.
3921 Return value is based on RETMODE argument. */
3923 static rtx
3924 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3926 struct expand_operand ops[3];
3927 rtx dest_mem;
3928 rtx src_mem;
3930 if (!targetm.have_movstr ())
3931 return NULL_RTX;
3933 dest_mem = get_memory_rtx (dest, NULL);
3934 src_mem = get_memory_rtx (src, NULL);
3935 if (retmode == RETURN_BEGIN)
3937 target = force_reg (Pmode, XEXP (dest_mem, 0));
3938 dest_mem = replace_equiv_address (dest_mem, target);
3941 create_output_operand (&ops[0],
3942 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3943 create_fixed_operand (&ops[1], dest_mem);
3944 create_fixed_operand (&ops[2], src_mem);
3945 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3946 return NULL_RTX;
3948 if (retmode != RETURN_BEGIN && target != const0_rtx)
3950 target = ops[0].value;
3951 /* movstr is supposed to set end to the address of the NUL
3952 terminator. If the caller requested a mempcpy-like return value,
3953 adjust it. */
3954 if (retmode == RETURN_END)
3956 rtx tem = plus_constant (GET_MODE (target),
3957 gen_lowpart (GET_MODE (target), target), 1);
3958 emit_move_insn (target, force_operand (tem, NULL_RTX));
3961 return target;
3964 /* Do some very basic size validation of a call to the strcat builtin
3965 given by EXP. Return NULL_RTX to have the built-in expand to a call
3966 to the library function. */
3968 static rtx
3969 expand_builtin_strcat (tree exp, rtx)
3971 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3972 || !warn_stringop_overflow)
3973 return NULL_RTX;
3975 tree dest = CALL_EXPR_ARG (exp, 0);
3976 tree src = CALL_EXPR_ARG (exp, 1);
3978 /* There is no way here to determine the length of the string in
3979 the destination to which the SRC string is being appended so
3980 just diagnose cases when the source string is longer than
3981 the destination object. */
3983 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3985 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3986 destsize);
3988 return NULL_RTX;
3991 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3992 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3993 try to get the result in TARGET, if convenient (and in mode MODE if that's
3994 convenient). */
3996 static rtx
3997 expand_builtin_strcpy (tree exp, rtx target)
3999 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4000 return NULL_RTX;
4002 tree dest = CALL_EXPR_ARG (exp, 0);
4003 tree src = CALL_EXPR_ARG (exp, 1);
4005 if (warn_stringop_overflow)
4007 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4008 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4009 src, destsize);
4012 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4014 /* Check to see if the argument was declared attribute nonstring
4015 and if so, issue a warning since at this point it's not known
4016 to be nul-terminated. */
4017 tree fndecl = get_callee_fndecl (exp);
4018 maybe_warn_nonstring_arg (fndecl, exp);
4019 return ret;
4022 return NULL_RTX;
4025 /* Helper function to do the actual work for expand_builtin_strcpy. The
4026 arguments to the builtin_strcpy call DEST and SRC are broken out
4027 so that this can also be called without constructing an actual CALL_EXPR.
4028 The other arguments and return value are the same as for
4029 expand_builtin_strcpy. */
4031 static rtx
4032 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4034 /* Detect strcpy calls with unterminated arrays. */
4035 if (tree nonstr = unterminated_array (src))
4037 /* NONSTR refers to the non-nul terminated constant array. */
4038 if (!TREE_NO_WARNING (exp))
4039 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4040 return NULL_RTX;
4043 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4046 /* Expand a call EXP to the stpcpy builtin.
4047 Return NULL_RTX if we failed; the caller should emit a normal call,
4048 otherwise try to get the result in TARGET, if convenient (and in
4049 mode MODE if that's convenient). */
4051 static rtx
4052 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4054 tree dst, src;
4055 location_t loc = EXPR_LOCATION (exp);
4057 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4058 return NULL_RTX;
4060 dst = CALL_EXPR_ARG (exp, 0);
4061 src = CALL_EXPR_ARG (exp, 1);
4063 if (warn_stringop_overflow)
4065 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4066 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4067 src, destsize);
4070 /* If return value is ignored, transform stpcpy into strcpy. */
4071 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4073 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4074 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4075 return expand_expr (result, target, mode, EXPAND_NORMAL);
4077 else
4079 tree len, lenp1;
4080 rtx ret;
4082 /* Ensure we get an actual string whose length can be evaluated at
4083 compile-time, not an expression containing a string. This is
4084 because the latter will potentially produce pessimized code
4085 when used to produce the return value. */
4086 c_strlen_data lendata = { };
4087 if (!c_getstr (src, NULL)
4088 || !(len = c_strlen (src, 0, &lendata, 1)))
4089 return expand_movstr (dst, src, target,
4090 /*retmode=*/ RETURN_END_MINUS_ONE);
4092 if (lendata.decl && !TREE_NO_WARNING (exp))
4093 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4095 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4096 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4097 target, exp,
4098 /*retmode=*/ RETURN_END_MINUS_ONE);
4100 if (ret)
4101 return ret;
4103 if (TREE_CODE (len) == INTEGER_CST)
4105 rtx len_rtx = expand_normal (len);
4107 if (CONST_INT_P (len_rtx))
4109 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4111 if (ret)
4113 if (! target)
4115 if (mode != VOIDmode)
4116 target = gen_reg_rtx (mode);
4117 else
4118 target = gen_reg_rtx (GET_MODE (ret));
4120 if (GET_MODE (target) != GET_MODE (ret))
4121 ret = gen_lowpart (GET_MODE (target), ret);
4123 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4124 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4125 gcc_assert (ret);
4127 return target;
4132 return expand_movstr (dst, src, target,
4133 /*retmode=*/ RETURN_END_MINUS_ONE);
4137 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4138 arguments while being careful to avoid duplicate warnings (which could
4139 be issued if the expander were to expand the call, resulting in it
4140 being emitted in expand_call ()). */
4142 static rtx
4143 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4145 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4147 /* The call has been successfully expanded. Check for nonstring
4148 arguments and issue warnings as appropriate. */
4149 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4150 return ret;
4153 return NULL_RTX;
4156 /* Check a call EXP to the stpncpy built-in for validity.
4157 Return NULL_RTX on both success and failure. */
4159 static rtx
4160 expand_builtin_stpncpy (tree exp, rtx)
4162 if (!validate_arglist (exp,
4163 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4164 || !warn_stringop_overflow)
4165 return NULL_RTX;
4167 /* The source and destination of the call. */
4168 tree dest = CALL_EXPR_ARG (exp, 0);
4169 tree src = CALL_EXPR_ARG (exp, 1);
4171 /* The exact number of bytes to write (not the maximum). */
4172 tree len = CALL_EXPR_ARG (exp, 2);
4174 /* The size of the destination object. */
4175 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4177 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4179 return NULL_RTX;
4182 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4183 bytes from constant string DATA + OFFSET and return it as target
4184 constant. */
4186 static rtx
4187 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4188 scalar_int_mode mode)
4190 const char *str = (const char *) data;
4192 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4193 return const0_rtx;
4195 return c_readstr (str + offset, mode);
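/* Illustrative note (an addition, not part of the original source): for
   DATA "ab" and a 4-byte MODE, offset 0 reads the bytes 'a', 'b', '\0',
   '\0' (c_readstr zero-fills once it reaches the nul), while any offset
   greater than strlen ("ab") == 2 produces const0_rtx, supplying the
   trailing zero padding that strncpy requires.  */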
4198 /* Helper to check the sizes of sequences and the destination of calls
4199 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4200 success (no overflow or invalid sizes), false otherwise. */
4202 static bool
4203 check_strncat_sizes (tree exp, tree objsize)
4205 tree dest = CALL_EXPR_ARG (exp, 0);
4206 tree src = CALL_EXPR_ARG (exp, 1);
4207 tree maxread = CALL_EXPR_ARG (exp, 2);
4209 /* Try to determine the range of lengths that the source expression
4210 refers to. */
4211 c_strlen_data lendata = { };
4212 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4214 /* Try to verify that the destination is big enough for the shortest
4215 string. */
4217 if (!objsize && warn_stringop_overflow)
4219 /* If it hasn't been provided by __strncat_chk, try to determine
4220 the size of the destination object into which the source is
4221 being copied. */
4222 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4225 /* Add one for the terminating nul. */
4226 tree srclen = (lendata.minlen
4227 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4228 size_one_node)
4229 : NULL_TREE);
4231 /* The strncat function copies at most MAXREAD bytes and always appends
4232 the terminating nul so the specified upper bound should never be equal
4233 to (or greater than) the size of the destination. */
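/* Illustrative note (an addition, not part of the original source): with
   char d[8], a call like strncat (d, s, sizeof d) makes the bound equal
   to the destination size, so up to 8 bytes plus the terminating nul
   may be written; that is the mistake diagnosed just below.  */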
4234 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4235 && tree_int_cst_equal (objsize, maxread))
4237 location_t loc = tree_nonartificial_location (exp);
4238 loc = expansion_point_location_if_in_system_header (loc);
4240 warning_at (loc, OPT_Wstringop_overflow_,
4241 "%K%qD specified bound %E equals destination size",
4242 exp, get_callee_fndecl (exp), maxread);
4244 return false;
4247 if (!srclen
4248 || (maxread && tree_fits_uhwi_p (maxread)
4249 && tree_fits_uhwi_p (srclen)
4250 && tree_int_cst_lt (maxread, srclen)))
4251 srclen = maxread;
4253 /* The number of bytes to write is LEN but check_access will also
4254 check SRCLEN if LEN's value isn't known. */
4255 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4256 objsize);
4259 /* Similar to expand_builtin_strcat, do some very basic size validation
4260 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4261 the built-in expand to a call to the library function. */
4263 static rtx
4264 expand_builtin_strncat (tree exp, rtx)
4266 if (!validate_arglist (exp,
4267 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4268 || !warn_stringop_overflow)
4269 return NULL_RTX;
4271 tree dest = CALL_EXPR_ARG (exp, 0);
4272 tree src = CALL_EXPR_ARG (exp, 1);
4273 /* The upper bound on the number of bytes to write. */
4274 tree maxread = CALL_EXPR_ARG (exp, 2);
4275 /* The length of the source sequence. */
4276 tree slen = c_strlen (src, 1);
4278 /* Try to determine the range of lengths that the source expression
4279 refers to. Since the lengths are only used for warning and not
4280 for code generation, disable strict mode below. */
4281 tree maxlen = slen;
4282 if (!maxlen)
4284 c_strlen_data lendata = { };
4285 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4286 maxlen = lendata.maxbound;
4289 /* Try to verify that the destination is big enough for the shortest
4290 string. First try to determine the size of the destination object
4291 into which the source is being copied. */
4292 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4294 /* Add one for the terminating nul. */
4295 tree srclen = (maxlen
4296 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4297 size_one_node)
4298 : NULL_TREE);
4300 /* The strncat function copies at most MAXREAD bytes and always appends
4301 the terminating nul so the specified upper bound should never be equal
4302 to (or greater than) the size of the destination. */
4303 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4304 && tree_int_cst_equal (destsize, maxread))
4306 location_t loc = tree_nonartificial_location (exp);
4307 loc = expansion_point_location_if_in_system_header (loc);
4309 warning_at (loc, OPT_Wstringop_overflow_,
4310 "%K%qD specified bound %E equals destination size",
4311 exp, get_callee_fndecl (exp), maxread);
4313 return NULL_RTX;
4316 if (!srclen
4317 || (maxread && tree_fits_uhwi_p (maxread)
4318 && tree_fits_uhwi_p (srclen)
4319 && tree_int_cst_lt (maxread, srclen)))
4320 srclen = maxread;
4322 /* The number of bytes to write is SRCLEN. */
4323 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4325 return NULL_RTX;
4328 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4329 NULL_RTX if we failed; the caller should emit a normal call. */
4331 static rtx
4332 expand_builtin_strncpy (tree exp, rtx target)
4334 location_t loc = EXPR_LOCATION (exp);
4336 if (validate_arglist (exp,
4337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4339 tree dest = CALL_EXPR_ARG (exp, 0);
4340 tree src = CALL_EXPR_ARG (exp, 1);
4341 /* The number of bytes to write (not the maximum). */
4342 tree len = CALL_EXPR_ARG (exp, 2);
4343 /* The length of the source sequence. */
4344 tree slen = c_strlen (src, 1);
4346 if (warn_stringop_overflow)
4348 tree destsize = compute_objsize (dest,
4349 warn_stringop_overflow - 1);
4351 /* The number of bytes to write is LEN but check_access will also
4352 check SLEN if LEN's value isn't known. */
4353 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4354 destsize);
4357 /* We must be passed a constant len and src parameter. */
4358 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4359 return NULL_RTX;
4361 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4363 /* We're required to pad with trailing zeros if the requested
4364 len is greater than strlen(s2)+1. In that case try to
4365 use store_by_pieces; if it fails, punt. */
4366 if (tree_int_cst_lt (slen, len))
4368 unsigned int dest_align = get_pointer_alignment (dest);
4369 const char *p = c_getstr (src);
4370 rtx dest_mem;
4372 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4373 || !can_store_by_pieces (tree_to_uhwi (len),
4374 builtin_strncpy_read_str,
4375 CONST_CAST (char *, p),
4376 dest_align, false))
4377 return NULL_RTX;
4379 dest_mem = get_memory_rtx (dest, len);
4380 store_by_pieces (dest_mem, tree_to_uhwi (len),
4381 builtin_strncpy_read_str,
4382 CONST_CAST (char *, p), dest_align, false,
4383 RETURN_BEGIN);
4384 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4385 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4386 return dest_mem;
4389 return NULL_RTX;
4392 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4393 bytes from constant string DATA + OFFSET and return it as target
4394 constant. */
4396 static rtx
4397 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4398 scalar_int_mode mode)
4400 const char *c = (const char *) data;
4401 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4403 memset (p, *c, GET_MODE_SIZE (mode));
4405 return c_readstr (p, mode);
4408 /* Callback routine for store_by_pieces. Return the RTL of a register
4409 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4410 char value given in the RTL register data. For example, if mode is
4411 4 bytes wide, return the RTL for 0x01010101*data. */
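/* For instance (an illustrative addition, not part of the original
   source), with DATA holding 0xab in a 4-byte MODE the multiplication
   below computes 0x01010101 * 0xab == 0xabababab, one copy of the byte
   per lane.  */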
4413 static rtx
4414 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4415 scalar_int_mode mode)
4417 rtx target, coeff;
4418 size_t size;
4419 char *p;
4421 size = GET_MODE_SIZE (mode);
4422 if (size == 1)
4423 return (rtx) data;
4425 p = XALLOCAVEC (char, size);
4426 memset (p, 1, size);
4427 coeff = c_readstr (p, mode);
4429 target = convert_to_mode (mode, (rtx) data, 1);
4430 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4431 return force_reg (mode, target);
4434 /* Expand expression EXP, which is a call to the memset builtin. Return
4435 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4436 try to get the result in TARGET, if convenient (and in mode MODE if that's
4437 convenient). */
4439 static rtx
4440 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4442 if (!validate_arglist (exp,
4443 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4444 return NULL_RTX;
4446 tree dest = CALL_EXPR_ARG (exp, 0);
4447 tree val = CALL_EXPR_ARG (exp, 1);
4448 tree len = CALL_EXPR_ARG (exp, 2);
4450 check_memop_access (exp, dest, NULL_TREE, len);
4452 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4455 /* Helper function to do the actual work for expand_builtin_memset. The
4456 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4457 so that this can also be called without constructing an actual CALL_EXPR.
4458 The other arguments and return value are the same as for
4459 expand_builtin_memset. */
4461 static rtx
4462 expand_builtin_memset_args (tree dest, tree val, tree len,
4463 rtx target, machine_mode mode, tree orig_exp)
4465 tree fndecl, fn;
4466 enum built_in_function fcode;
4467 machine_mode val_mode;
4468 char c;
4469 unsigned int dest_align;
4470 rtx dest_mem, dest_addr, len_rtx;
4471 HOST_WIDE_INT expected_size = -1;
4472 unsigned int expected_align = 0;
4473 unsigned HOST_WIDE_INT min_size;
4474 unsigned HOST_WIDE_INT max_size;
4475 unsigned HOST_WIDE_INT probable_max_size;
4477 dest_align = get_pointer_alignment (dest);
4479 /* If DEST is not a pointer type, don't do this operation in-line. */
4480 if (dest_align == 0)
4481 return NULL_RTX;
4483 if (currently_expanding_gimple_stmt)
4484 stringop_block_profile (currently_expanding_gimple_stmt,
4485 &expected_align, &expected_size);
4487 if (expected_align < dest_align)
4488 expected_align = dest_align;
4490 /* If the LEN parameter is zero, return DEST. */
4491 if (integer_zerop (len))
4493 /* Evaluate and ignore VAL in case it has side-effects. */
4494 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4495 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4498 /* Stabilize the arguments in case we fail. */
4499 dest = builtin_save_expr (dest);
4500 val = builtin_save_expr (val);
4501 len = builtin_save_expr (len);
4503 len_rtx = expand_normal (len);
4504 determine_block_size (len, len_rtx, &min_size, &max_size,
4505 &probable_max_size);
4506 dest_mem = get_memory_rtx (dest, len);
4507 val_mode = TYPE_MODE (unsigned_char_type_node);
4509 if (TREE_CODE (val) != INTEGER_CST)
4511 rtx val_rtx;
4513 val_rtx = expand_normal (val);
4514 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4516 /* Assume that we can memset by pieces if we can store
4517 the coefficients by pieces (in the required modes).
4518 We can't pass builtin_memset_gen_str as that emits RTL. */
4519 c = 1;
4520 if (tree_fits_uhwi_p (len)
4521 && can_store_by_pieces (tree_to_uhwi (len),
4522 builtin_memset_read_str, &c, dest_align,
4523 true))
4525 val_rtx = force_reg (val_mode, val_rtx);
4526 store_by_pieces (dest_mem, tree_to_uhwi (len),
4527 builtin_memset_gen_str, val_rtx, dest_align,
4528 true, RETURN_BEGIN);
4530 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4531 dest_align, expected_align,
4532 expected_size, min_size, max_size,
4533 probable_max_size))
4534 goto do_libcall;
4536 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4537 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4538 return dest_mem;
4541 if (target_char_cast (val, &c))
4542 goto do_libcall;
4544 if (c)
4546 if (tree_fits_uhwi_p (len)
4547 && can_store_by_pieces (tree_to_uhwi (len),
4548 builtin_memset_read_str, &c, dest_align,
4549 true))
4550 store_by_pieces (dest_mem, tree_to_uhwi (len),
4551 builtin_memset_read_str, &c, dest_align, true,
4552 RETURN_BEGIN);
4553 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4554 gen_int_mode (c, val_mode),
4555 dest_align, expected_align,
4556 expected_size, min_size, max_size,
4557 probable_max_size))
4558 goto do_libcall;
4560 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4561 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4562 return dest_mem;
4565 set_mem_align (dest_mem, dest_align);
4566 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4567 CALL_EXPR_TAILCALL (orig_exp)
4568 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4569 expected_align, expected_size,
4570 min_size, max_size,
4571 probable_max_size);
4573 if (dest_addr == 0)
4575 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4576 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4579 return dest_addr;
4581 do_libcall:
4582 fndecl = get_callee_fndecl (orig_exp);
4583 fcode = DECL_FUNCTION_CODE (fndecl);
4584 if (fcode == BUILT_IN_MEMSET)
4585 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4586 dest, val, len);
4587 else if (fcode == BUILT_IN_BZERO)
4588 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4589 dest, len);
4590 else
4591 gcc_unreachable ();
4592 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4593 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4594 return expand_call (fn, target, target == const0_rtx);
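/* Illustrative note (an addition, not part of the original source): the
   expansion strategy above cascades as follows.  A zero LEN returns
   DEST directly; a non-constant VAL is stored by pieces (via
   builtin_memset_gen_str) or through the setmem pattern; a constant
   nonzero byte goes through store_by_pieces or setmem; a constant zero
   byte uses clear_storage_hints; and anything that cannot be expanded
   inline falls back to a library call to memset or bzero.  */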
4597 /* Expand expression EXP, which is a call to the bzero builtin. Return
4598 NULL_RTX if we failed; the caller should emit a normal call. */
4600 static rtx
4601 expand_builtin_bzero (tree exp)
4603 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4604 return NULL_RTX;
4606 tree dest = CALL_EXPR_ARG (exp, 0);
4607 tree size = CALL_EXPR_ARG (exp, 1);
4609 check_memop_access (exp, dest, NULL_TREE, size);
4611 /* New argument list transforming bzero(ptr x, int y) to
4612 memset(ptr x, int 0, size_t y). This is done this way
4613 so that if it isn't expanded inline, we fall back to
4614 calling bzero instead of memset. */
4616 location_t loc = EXPR_LOCATION (exp);
4618 return expand_builtin_memset_args (dest, integer_zero_node,
4619 fold_convert_loc (loc,
4620 size_type_node, size),
4621 const0_rtx, VOIDmode, exp);
4624 /* Try to expand cmpstr operation ICODE with the given operands.
4625 Return the result rtx on success, otherwise return null. */
4627 static rtx
4628 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4629 HOST_WIDE_INT align)
4631 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4633 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4634 target = NULL_RTX;
4636 struct expand_operand ops[4];
4637 create_output_operand (&ops[0], target, insn_mode);
4638 create_fixed_operand (&ops[1], arg1_rtx);
4639 create_fixed_operand (&ops[2], arg2_rtx);
4640 create_integer_operand (&ops[3], align);
4641 if (maybe_expand_insn (icode, 4, ops))
4642 return ops[0].value;
4643 return NULL_RTX;
4646 /* Expand expression EXP, which is a call to the memcmp built-in function.
4647 Return NULL_RTX if we failed and the caller should emit a normal call,
4648 otherwise try to get the result in TARGET, if convenient.
4649 RESULT_EQ is true if we can relax the returned value to be either zero
4650 or nonzero, without caring about the sign. */
4652 static rtx
4653 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4655 if (!validate_arglist (exp,
4656 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4657 return NULL_RTX;
4659 tree arg1 = CALL_EXPR_ARG (exp, 0);
4660 tree arg2 = CALL_EXPR_ARG (exp, 1);
4661 tree len = CALL_EXPR_ARG (exp, 2);
4662 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4663 bool no_overflow = true;
4665 /* Diagnose calls where the specified length exceeds the size of either
4666 object. */
4667 tree size = compute_objsize (arg1, 0);
4668 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4669 len, /*maxread=*/NULL_TREE, size,
4670 /*objsize=*/NULL_TREE);
4671 if (no_overflow)
4673 size = compute_objsize (arg2, 0);
4674 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4675 len, /*maxread=*/NULL_TREE, size,
4676 /*objsize=*/NULL_TREE);
4679 /* If the specified length exceeds the size of either object,
4680 call the function. */
4681 if (!no_overflow)
4682 return NULL_RTX;
4684 /* Due to the performance benefit, always inline the calls first
4685 when result_eq is false. */
4686 rtx result = NULL_RTX;
4688 if (!result_eq && fcode != BUILT_IN_BCMP)
4690 result = inline_expand_builtin_string_cmp (exp, target);
4691 if (result)
4692 return result;
4695 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4696 location_t loc = EXPR_LOCATION (exp);
4698 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4699 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4701 /* If we don't have POINTER_TYPE, call the function. */
4702 if (arg1_align == 0 || arg2_align == 0)
4703 return NULL_RTX;
4705 rtx arg1_rtx = get_memory_rtx (arg1, len);
4706 rtx arg2_rtx = get_memory_rtx (arg2, len);
4707 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4709 /* Set MEM_SIZE as appropriate. */
4710 if (CONST_INT_P (len_rtx))
4712 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4713 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4716 by_pieces_constfn constfn = NULL;
4718 const char *src_str = c_getstr (arg2);
4719 if (result_eq && src_str == NULL)
4721 src_str = c_getstr (arg1);
4722 if (src_str != NULL)
4723 std::swap (arg1_rtx, arg2_rtx);
4726 /* If SRC is a string constant and the block comparison would be done
4727 by pieces, we can avoid loading the string from memory
4728 and only store the computed constants. */
4729 if (src_str
4730 && CONST_INT_P (len_rtx)
4731 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4732 constfn = builtin_memcpy_read_str;
4734 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4735 TREE_TYPE (len), target,
4736 result_eq, constfn,
4737 CONST_CAST (char *, src_str));
4739 if (result)
4741 /* Return the value in the proper mode for this function. */
4742 if (GET_MODE (result) == mode)
4743 return result;
4745 if (target != 0)
4747 convert_move (target, result, 0);
4748 return target;
4751 return convert_to_mode (mode, result, 0);
4754 return NULL_RTX;
4757 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4758 if we failed; the caller should emit a normal call, otherwise try to get
4759 the result in TARGET, if convenient. */
4761 static rtx
4762 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4764 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4765 return NULL_RTX;
4767 /* Due to the performance benefit, always inline the calls first. */
4768 rtx result = NULL_RTX;
4769 result = inline_expand_builtin_string_cmp (exp, target);
4770 if (result)
4771 return result;
4773 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4774 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4775 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4776 return NULL_RTX;
4778 tree arg1 = CALL_EXPR_ARG (exp, 0);
4779 tree arg2 = CALL_EXPR_ARG (exp, 1);
4781 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4782 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4784 /* If we don't have POINTER_TYPE, call the function. */
4785 if (arg1_align == 0 || arg2_align == 0)
4786 return NULL_RTX;
4788 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4789 arg1 = builtin_save_expr (arg1);
4790 arg2 = builtin_save_expr (arg2);
4792 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4793 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4795 /* Try to call cmpstrsi. */
4796 if (cmpstr_icode != CODE_FOR_nothing)
4797 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4798 MIN (arg1_align, arg2_align));
4800 /* Try to determine at least one length and call cmpstrnsi. */
4801 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4803 tree len;
4804 rtx arg3_rtx;
4806 tree len1 = c_strlen (arg1, 1);
4807 tree len2 = c_strlen (arg2, 1);
4809 if (len1)
4810 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4811 if (len2)
4812 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4814 /* If we don't have a constant length for the first, use the length
4815 of the second, if we know it. We don't require a constant for
4816 this case; some cost analysis could be done if both are available
4817 but neither is constant. For now, assume they're equally cheap,
4818 unless one has side effects. If both strings have constant lengths,
4819 use the smaller. */
4821 if (!len1)
4822 len = len2;
4823 else if (!len2)
4824 len = len1;
4825 else if (TREE_SIDE_EFFECTS (len1))
4826 len = len2;
4827 else if (TREE_SIDE_EFFECTS (len2))
4828 len = len1;
4829 else if (TREE_CODE (len1) != INTEGER_CST)
4830 len = len2;
4831 else if (TREE_CODE (len2) != INTEGER_CST)
4832 len = len1;
4833 else if (tree_int_cst_lt (len1, len2))
4834 len = len1;
4835 else
4836 len = len2;
4838 /* If both arguments have side effects, we cannot optimize. */
4839 if (len && !TREE_SIDE_EFFECTS (len))
4841 arg3_rtx = expand_normal (len);
4842 result = expand_cmpstrn_or_cmpmem
4843 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4844 arg3_rtx, MIN (arg1_align, arg2_align));
4848 tree fndecl = get_callee_fndecl (exp);
4849 if (result)
4851 /* Check to see if the argument was declared attribute nonstring
4852 and if so, issue a warning since at this point it's not known
4853 to be nul-terminated. */
4854 maybe_warn_nonstring_arg (fndecl, exp);
4856 /* Return the value in the proper mode for this function. */
4857 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4858 if (GET_MODE (result) == mode)
4859 return result;
4860 if (target == 0)
4861 return convert_to_mode (mode, result, 0);
4862 convert_move (target, result, 0);
4863 return target;
4866 /* Expand the library call ourselves using a stabilized argument
4867 list to avoid re-evaluating the function's arguments twice. */
4868 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4869 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4870 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4871 return expand_call (fn, target, target == const0_rtx);
4874 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4875 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4876 the result in TARGET, if convenient. */
4878 static rtx
4879 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4880 ATTRIBUTE_UNUSED machine_mode mode)
4882 if (!validate_arglist (exp,
4883 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4884 return NULL_RTX;
4886 /* Due to the performance benefit, always inline the calls first. */
4887 rtx result = NULL_RTX;
4888 result = inline_expand_builtin_string_cmp (exp, target);
4889 if (result)
4890 return result;
4892 /* If c_strlen can determine an expression for one of the string
4893 lengths, and it doesn't have side effects, then emit cmpstrnsi
4894 using length MIN(strlen(string)+1, arg3). */
4895 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4896 if (cmpstrn_icode == CODE_FOR_nothing)
4897 return NULL_RTX;
4899 tree len;
4901 tree arg1 = CALL_EXPR_ARG (exp, 0);
4902 tree arg2 = CALL_EXPR_ARG (exp, 1);
4903 tree arg3 = CALL_EXPR_ARG (exp, 2);
4905 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4906 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4908 tree len1 = c_strlen (arg1, 1);
4909 tree len2 = c_strlen (arg2, 1);
4911 location_t loc = EXPR_LOCATION (exp);
4913 if (len1)
4914 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4915 if (len2)
4916 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4918 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4920 /* If we don't have a constant length for the first, use the length
4921 of the second, if we know it. If neither string is constant length,
4922 use the given length argument. We don't require a constant for
4923 this case; some cost analysis could be done if both are available
4924 but neither is constant. For now, assume they're equally cheap,
4925 unless one has side effects. If both strings have constant lengths,
4926 use the smaller. */
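/* Illustrative note (an addition, not part of the original source): for
   strncmp ("ab", s, 5), len1 is strlen ("ab") + 1 == 3 and len2 is
   unknown, so LEN becomes 3; since that differs from the bound 5, the
   code below emits cmpstrnsi with length MIN (3, 5) == 3.  */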
4928 if (!len1 && !len2)
4929 len = len3;
4930 else if (!len1)
4931 len = len2;
4932 else if (!len2)
4933 len = len1;
4934 else if (TREE_SIDE_EFFECTS (len1))
4935 len = len2;
4936 else if (TREE_SIDE_EFFECTS (len2))
4937 len = len1;
4938 else if (TREE_CODE (len1) != INTEGER_CST)
4939 len = len2;
4940 else if (TREE_CODE (len2) != INTEGER_CST)
4941 len = len1;
4942 else if (tree_int_cst_lt (len1, len2))
4943 len = len1;
4944 else
4945 len = len2;
4947 /* If we are not using the given length, we must incorporate it here.
4948 The actual new length parameter will be MIN(len,arg3) in this case. */
4949 if (len != len3)
4951 len = fold_convert_loc (loc, sizetype, len);
4952 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4954 rtx arg1_rtx = get_memory_rtx (arg1, len);
4955 rtx arg2_rtx = get_memory_rtx (arg2, len);
4956 rtx arg3_rtx = expand_normal (len);
4957 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4958 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4959 MIN (arg1_align, arg2_align));
4961 tree fndecl = get_callee_fndecl (exp);
4962 if (result)
4964 /* Check to see if the argument was declared attribute nonstring
4965 and if so, issue a warning since at this point it's not known
4966 to be nul-terminated. */
4967 maybe_warn_nonstring_arg (fndecl, exp);
4969 /* Return the value in the proper mode for this function. */
4970 mode = TYPE_MODE (TREE_TYPE (exp));
4971 if (GET_MODE (result) == mode)
4972 return result;
4973 if (target == 0)
4974 return convert_to_mode (mode, result, 0);
4975 convert_move (target, result, 0);
4976 return target;
4979 /* Expand the library call ourselves using a stabilized argument
4980 list to avoid re-evaluating the function's arguments twice. */
4981 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4982 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4983 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4984 return expand_call (fn, target, target == const0_rtx);
4987 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4988 if that's convenient. */
4990 rtx
4991 expand_builtin_saveregs (void)
4993 rtx val;
4994 rtx_insn *seq;
4996 /* Don't do __builtin_saveregs more than once in a function.
4997 Save the result of the first call and reuse it. */
4998 if (saveregs_value != 0)
4999 return saveregs_value;
5001 /* When this function is called, it means that registers must be
5002 saved on entry to this function. So we migrate the call to the
5003 first insn of this function. */
5005 start_sequence ();
5007 /* Do whatever the machine needs done in this case. */
5008 val = targetm.calls.expand_builtin_saveregs ();
5010 seq = get_insns ();
5011 end_sequence ();
5013 saveregs_value = val;
5015 /* Put the insns after the NOTE that starts the function. If this
5016 is inside a start_sequence, make the outer-level insn chain current, so
5017 the code is placed at the start of the function. */
5018 push_topmost_sequence ();
5019 emit_insn_after (seq, entry_of_function ());
5020 pop_topmost_sequence ();
5022 return val;
5025 /* Expand a call to __builtin_next_arg. */
5027 static rtx
5028 expand_builtin_next_arg (void)
5030 /* Checking arguments is already done in fold_builtin_next_arg,
5031 which must be called before this function. */
5032 return expand_binop (ptr_mode, add_optab,
5033 crtl->args.internal_arg_pointer,
5034 crtl->args.arg_offset_rtx,
5035 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5038 /* Make it easier for the backends by protecting the valist argument
5039 from multiple evaluations. */
5041 static tree
5042 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5044 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5046 /* The current way of determining the type of valist is completely
5047 bogus. We should have the information on the va builtin instead. */
5048 if (!vatype)
5049 vatype = targetm.fn_abi_va_list (cfun->decl);
5051 if (TREE_CODE (vatype) == ARRAY_TYPE)
5053 if (TREE_SIDE_EFFECTS (valist))
5054 valist = save_expr (valist);
5056 /* For this case, the backends will be expecting a pointer to
5057 vatype, but it's possible we've actually been given an array
5058 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5059 So fix it. */
5060 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5062 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5063 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5066 else
5068 tree pt = build_pointer_type (vatype);
5070 if (! needs_lvalue)
5072 if (! TREE_SIDE_EFFECTS (valist))
5073 return valist;
5075 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5076 TREE_SIDE_EFFECTS (valist) = 1;
5079 if (TREE_SIDE_EFFECTS (valist))
5080 valist = save_expr (valist);
5081 valist = fold_build2_loc (loc, MEM_REF,
5082 vatype, valist, build_int_cst (pt, 0));
5085 return valist;
5088 /* The "standard" definition of va_list is void*. */
5090 tree
5091 std_build_builtin_va_list (void)
5093 return ptr_type_node;
5096 /* The "standard" abi va_list is va_list_type_node. */
5098 tree
5099 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5101 return va_list_type_node;
5104 /* The "standard" type of va_list is va_list_type_node. */
5106 tree
5107 std_canonical_va_list_type (tree type)
5109 tree wtype, htype;
5111 wtype = va_list_type_node;
5112 htype = type;
5114 if (TREE_CODE (wtype) == ARRAY_TYPE)
5116 /* If va_list is an array type, the argument may have decayed
5117 to a pointer type, e.g. by being passed to another function.
5118 In that case, unwrap both types so that we can compare the
5119 underlying records. */
5120 if (TREE_CODE (htype) == ARRAY_TYPE
5121 || POINTER_TYPE_P (htype))
5123 wtype = TREE_TYPE (wtype);
5124 htype = TREE_TYPE (htype);
5127 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5128 return va_list_type_node;
5130 return NULL_TREE;
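/* Illustrative note (an addition, not part of the original source): on
   targets where va_list is an array type (e.g. the one-element array of
   struct __va_list_tag used by the x86-64 ABI), a va_list argument
   decays to a pointer when passed to another function, so the code
   above unwraps both the canonical and the incoming type down to the
   underlying record before comparing them.  */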
5133 /* The "standard" implementation of va_start: just assign `nextarg' to
5134 the variable. */
5136 void
5137 std_expand_builtin_va_start (tree valist, rtx nextarg)
5139 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5140 convert_move (va_r, nextarg, 0);
5143 /* Expand EXP, a call to __builtin_va_start. */
5145 static rtx
5146 expand_builtin_va_start (tree exp)
5148 rtx nextarg;
5149 tree valist;
5150 location_t loc = EXPR_LOCATION (exp);
5152 if (call_expr_nargs (exp) < 2)
5154 error_at (loc, "too few arguments to function %<va_start%>");
5155 return const0_rtx;
5158 if (fold_builtin_next_arg (exp, true))
5159 return const0_rtx;
5161 nextarg = expand_builtin_next_arg ();
5162 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5164 if (targetm.expand_builtin_va_start)
5165 targetm.expand_builtin_va_start (valist, nextarg);
5166 else
5167 std_expand_builtin_va_start (valist, nextarg);
5169 return const0_rtx;
5172 /* Expand EXP, a call to __builtin_va_end. */
5174 static rtx
5175 expand_builtin_va_end (tree exp)
5177 tree valist = CALL_EXPR_ARG (exp, 0);
5179 /* Evaluate for side effects, if needed. I hate macros that don't
5180 do that. */
5181 if (TREE_SIDE_EFFECTS (valist))
5182 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5184 return const0_rtx;
5187 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5188 builtin rather than just as an assignment in stdarg.h because of the
5189 nastiness of array-type va_list types. */
5191 static rtx
5192 expand_builtin_va_copy (tree exp)
5194 tree dst, src, t;
5195 location_t loc = EXPR_LOCATION (exp);
5197 dst = CALL_EXPR_ARG (exp, 0);
5198 src = CALL_EXPR_ARG (exp, 1);
5200 dst = stabilize_va_list_loc (loc, dst, 1);
5201 src = stabilize_va_list_loc (loc, src, 0);
5203 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5205 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5207 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5208 TREE_SIDE_EFFECTS (t) = 1;
5209 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5211 else
5213 rtx dstb, srcb, size;
5215 /* Evaluate to pointers. */
5216 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5217 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5218 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5219 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5221 dstb = convert_memory_address (Pmode, dstb);
5222 srcb = convert_memory_address (Pmode, srcb);
5224 /* "Dereference" to BLKmode memories. */
5225 dstb = gen_rtx_MEM (BLKmode, dstb);
5226 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5227 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5228 srcb = gen_rtx_MEM (BLKmode, srcb);
5229 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5230 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5232 /* Copy. */
5233 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5236 return const0_rtx;
5239 /* Expand a call to one of the builtin functions __builtin_frame_address or
5240 __builtin_return_address. */
5242 static rtx
5243 expand_builtin_frame_address (tree fndecl, tree exp)
5245 /* The argument must be a nonnegative integer constant.
5246 It counts the number of frames to scan up the stack.
5247 The value is either the frame pointer value or the return
5248 address saved in that frame. */
5249 if (call_expr_nargs (exp) == 0)
5250 /* Warning about missing arg was already issued. */
5251 return const0_rtx;
5252 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5254 error ("invalid argument to %qD", fndecl);
5255 return const0_rtx;
5257 else
5259 /* Number of frames to scan up the stack. */
5260 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5262 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5264 /* Some ports cannot access arbitrary stack frames. */
5265 if (tem == NULL)
5267 warning (0, "unsupported argument to %qD", fndecl);
5268 return const0_rtx;
5271 if (count)
5273 /* Warn since no effort is made to ensure that any frame
5274 beyond the current one exists or can be safely reached. */
5275 warning (OPT_Wframe_address, "calling %qD with "
5276 "a nonzero argument is unsafe", fndecl);
5279 /* For __builtin_frame_address, return what we've got. */
5280 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5281 return tem;
5283 if (!REG_P (tem)
5284 && ! CONSTANT_P (tem))
5285 tem = copy_addr_to_reg (tem);
5286 return tem;
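/* Illustrative usage, not GCC source: the calls handled above look like
     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (0);
   A nonzero count asks for a caller's frame and triggers the
   -Wframe-address warning, since frames beyond the current one are not
   guaranteed to be reachable.  */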
5290 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5291 failed and the caller should emit a normal call. */
5293 static rtx
5294 expand_builtin_alloca (tree exp)
5296 rtx op0;
5297 rtx result;
5298 unsigned int align;
5299 tree fndecl = get_callee_fndecl (exp);
5300 HOST_WIDE_INT max_size;
5301 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5302 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5303 bool valid_arglist
5304 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5305 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5306 VOID_TYPE)
5307 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5308 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5309 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5311 if (!valid_arglist)
5312 return NULL_RTX;
5314 if ((alloca_for_var
5315 && warn_vla_limit >= HOST_WIDE_INT_MAX
5316 && warn_alloc_size_limit < warn_vla_limit)
5317 || (!alloca_for_var
5318 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5319 && warn_alloc_size_limit < warn_alloca_limit
5322 /* -Walloca-larger-than and -Wvla-larger-than settings of
5323 less than HOST_WIDE_INT_MAX override the more general
5324 -Walloc-size-larger-than so unless either of the former
5325 options is smaller than the last one (which would imply
5326 that the call was already checked), check the alloca
5327 arguments for overflow. */
5328 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5329 int idx[] = { 0, -1 };
5330 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5333 /* Compute the argument. */
5334 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5336 /* Compute the alignment. */
5337 align = (fcode == BUILT_IN_ALLOCA
5338 ? BIGGEST_ALIGNMENT
5339 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5341 /* Compute the maximum size. */
5342 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5343 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5344 : -1);
5346 /* Allocate the desired space. If the allocation stems from the declaration
5347 of a variable-sized object, it cannot accumulate. */
5348 result
5349 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5350 result = convert_memory_address (ptr_mode, result);
5352 return result;
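/* Illustrative usage, not GCC source: the argument-list forms validated
   above correspond to user-visible calls such as
     void *p = __builtin_alloca (n);
     void *q = __builtin_alloca_with_align (n, 256);
   where the alignment argument of the second form is given in bits and
   must be a constant power of two.  */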
5355 /* Expand EXP, a call to __asan_allocas_unpoison.  Add
5356 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5357 STACK_DYNAMIC_OFFSET value, to the second argument of the call.  See the
5358 motivation for this in the comment on handle_builtin_stack_restore. */
5360 static rtx
5361 expand_asan_emit_allocas_unpoison (tree exp)
5363 tree arg0 = CALL_EXPR_ARG (exp, 0);
5364 tree arg1 = CALL_EXPR_ARG (exp, 1);
5365 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5366 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5367 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5368 stack_pointer_rtx, NULL_RTX, 0,
5369 OPTAB_LIB_WIDEN);
5370 off = convert_modes (ptr_mode, Pmode, off, 0);
5371 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5372 OPTAB_LIB_WIDEN);
5373 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5374 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5375 top, ptr_mode, bot, ptr_mode);
5376 return ret;
5379 /* Expand a call to bswap builtin in EXP.
5380 Return NULL_RTX if a normal call should be emitted rather than expanding the
5381 function in-line. If convenient, the result should be placed in TARGET.
5382 SUBTARGET may be used as the target for computing one of EXP's operands. */
5384 static rtx
5385 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5386 rtx subtarget)
5388 tree arg;
5389 rtx op0;
5391 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5392 return NULL_RTX;
5394 arg = CALL_EXPR_ARG (exp, 0);
5395 op0 = expand_expr (arg,
5396 subtarget && GET_MODE (subtarget) == target_mode
5397 ? subtarget : NULL_RTX,
5398 target_mode, EXPAND_NORMAL);
5399 if (GET_MODE (op0) != target_mode)
5400 op0 = convert_to_mode (target_mode, op0, 1);
5402 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5404 gcc_assert (target);
5406 return convert_to_mode (target_mode, target, 1);
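/* Illustrative example, not GCC source: for the 32-bit variant the
   byte reversal expanded here satisfies
     __builtin_bswap32 (0x12345678) == 0x78563412  */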
5409 /* Expand a call to a unary builtin in EXP.
5410 Return NULL_RTX if a normal call should be emitted rather than expanding the
5411 function in-line. If convenient, the result should be placed in TARGET.
5412 SUBTARGET may be used as the target for computing one of EXP's operands. */
5414 static rtx
5415 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5416 rtx subtarget, optab op_optab)
5418 rtx op0;
5420 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5421 return NULL_RTX;
5423 /* Compute the argument. */
5424 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5425 (subtarget
5426 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5427 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5428 VOIDmode, EXPAND_NORMAL);
5429 /* Compute op, into TARGET if possible.
5430 Set TARGET to wherever the result comes back. */
5431 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5432 op_optab, op0, target, op_optab != clrsb_optab);
5433 gcc_assert (target);
5435 return convert_to_mode (target_mode, target, 0);
5438 /* Expand a call to __builtin_expect. We just return our argument
5439 as the builtin_expect semantics should already have been applied by
5440 the tree branch prediction pass. */
5442 static rtx
5443 expand_builtin_expect (tree exp, rtx target)
5445 tree arg;
5447 if (call_expr_nargs (exp) < 2)
5448 return const0_rtx;
5449 arg = CALL_EXPR_ARG (exp, 0);
5451 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5452 /* When guessing was done, the hints should be already stripped away. */
5453 gcc_assert (!flag_guess_branch_prob
5454 || optimize == 0 || seen_error ());
5455 return target;
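/* Illustrative usage, not GCC source: by the time we get here a hint
   such as
     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_case ();
   has already been consumed by branch prediction, so only the first
   argument survives.  */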
5458 /* Expand a call to __builtin_expect_with_probability. We just return our
5459 argument, as the builtin_expect semantics should already have been
5460 applied by the tree branch prediction pass. */
5462 static rtx
5463 expand_builtin_expect_with_probability (tree exp, rtx target)
5465 tree arg;
5467 if (call_expr_nargs (exp) < 3)
5468 return const0_rtx;
5469 arg = CALL_EXPR_ARG (exp, 0);
5471 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5472 /* When guessing was done, the hints should be already stripped away. */
5473 gcc_assert (!flag_guess_branch_prob
5474 || optimize == 0 || seen_error ());
5475 return target;
5479 /* Expand a call to __builtin_assume_aligned. We just return our first
5480 argument, as the builtin_assume_aligned semantics should already have
5481 been applied by CCP. */
5483 static rtx
5484 expand_builtin_assume_aligned (tree exp, rtx target)
5486 if (call_expr_nargs (exp) < 2)
5487 return const0_rtx;
5488 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5489 EXPAND_NORMAL);
5490 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5491 && (call_expr_nargs (exp) < 3
5492 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5493 return target;
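/* Illustrative usage, not GCC source: CCP has already exploited a hint
   such as
     double *ap = __builtin_assume_aligned (a, 32);
   so all that is left to do here is return the first argument.  */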
5496 void
5497 expand_builtin_trap (void)
5499 if (targetm.have_trap ())
5501 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5502 /* For trap insns when not accumulating outgoing args force
5503 REG_ARGS_SIZE note to prevent crossjumping of calls with
5504 different args sizes. */
5505 if (!ACCUMULATE_OUTGOING_ARGS)
5506 add_args_size_note (insn, stack_pointer_delta);
5508 else
5510 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5511 tree call_expr = build_call_expr (fn, 0);
5512 expand_call (call_expr, NULL_RTX, false);
5515 emit_barrier ();
5518 /* Expand a call to __builtin_unreachable. We do nothing except emit
5519 a barrier saying that control flow will not pass here.
5521 It is the responsibility of the program being compiled to ensure
5522 that control flow never reaches __builtin_unreachable. */
5523 static void
5524 expand_builtin_unreachable (void)
5526 emit_barrier ();
5529 /* Expand EXP, a call to fabs, fabsf or fabsl.
5530 Return NULL_RTX if a normal call should be emitted rather than expanding
5531 the function inline. If convenient, the result should be placed
5532 in TARGET. SUBTARGET may be used as the target for computing
5533 the operand. */
5535 static rtx
5536 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5538 machine_mode mode;
5539 tree arg;
5540 rtx op0;
5542 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5543 return NULL_RTX;
5545 arg = CALL_EXPR_ARG (exp, 0);
5546 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5547 mode = TYPE_MODE (TREE_TYPE (arg));
5548 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5549 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5552 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5553 Return NULL if a normal call should be emitted rather than expanding the
5554 function inline. If convenient, the result should be placed in TARGET.
5555 SUBTARGET may be used as the target for computing the operand. */
5557 static rtx
5558 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5560 rtx op0, op1;
5561 tree arg;
5563 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5564 return NULL_RTX;
5566 arg = CALL_EXPR_ARG (exp, 0);
5567 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5569 arg = CALL_EXPR_ARG (exp, 1);
5570 op1 = expand_normal (arg);
5572 return expand_copysign (op0, op1, target);
5575 /* Expand a call to __builtin___clear_cache. */
5577 static rtx
5578 expand_builtin___clear_cache (tree exp)
5580 if (!targetm.code_for_clear_cache)
5582 #ifdef CLEAR_INSN_CACHE
5583 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5584 does something. Just do the default expansion to a call to
5585 __clear_cache(). */
5586 return NULL_RTX;
5587 #else
5588 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5589 does nothing. There is no need to call it. Do nothing. */
5590 return const0_rtx;
5591 #endif /* CLEAR_INSN_CACHE */
5594 /* We have a "clear_cache" insn, and it will handle everything. */
5595 tree begin, end;
5596 rtx begin_rtx, end_rtx;
5598 /* We must not expand to a library call. If we did, any
5599 fallback library function in libgcc that might contain a call to
5600 __builtin___clear_cache() would recurse infinitely. */
5601 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5603 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5604 return const0_rtx;
5607 if (targetm.have_clear_cache ())
5609 struct expand_operand ops[2];
5611 begin = CALL_EXPR_ARG (exp, 0);
5612 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5614 end = CALL_EXPR_ARG (exp, 1);
5615 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5617 create_address_operand (&ops[0], begin_rtx);
5618 create_address_operand (&ops[1], end_rtx);
5619 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5620 return const0_rtx;
5622 return const0_rtx;
5625 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5627 static rtx
5628 round_trampoline_addr (rtx tramp)
5630 rtx temp, addend, mask;
5632 /* If we don't need too much alignment, we'll have been guaranteed
5633 proper alignment by get_trampoline_type. */
5634 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5635 return tramp;
5637 /* Round address up to desired boundary. */
5638 temp = gen_reg_rtx (Pmode);
5639 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5640 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5642 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5643 temp, 0, OPTAB_LIB_WIDEN);
5644 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5645 temp, 0, OPTAB_LIB_WIDEN);
5647 return tramp;
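/* Worked example, not GCC source: with TRAMPOLINE_ALIGNMENT of 64 bits,
   ADDEND is 64/8 - 1 = 7 and MASK is -8, so the two binops above compute
   (tramp + 7) & -8, i.e. the address rounded up to the next 8-byte
   boundary.  */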
5650 static rtx
5651 expand_builtin_init_trampoline (tree exp, bool onstack)
5653 tree t_tramp, t_func, t_chain;
5654 rtx m_tramp, r_tramp, r_chain, tmp;
5656 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5657 POINTER_TYPE, VOID_TYPE))
5658 return NULL_RTX;
5660 t_tramp = CALL_EXPR_ARG (exp, 0);
5661 t_func = CALL_EXPR_ARG (exp, 1);
5662 t_chain = CALL_EXPR_ARG (exp, 2);
5664 r_tramp = expand_normal (t_tramp);
5665 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5666 MEM_NOTRAP_P (m_tramp) = 1;
5668 /* If ONSTACK, the TRAMP argument should be the address of a field
5669 within the local function's FRAME decl. Either way, let's see if
5670 we can fill in the MEM_ATTRs for this memory. */
5671 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5672 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5674 /* The creator of a heap trampoline is responsible for making sure the
5675 address is aligned to at least STACK_BOUNDARY. Normally malloc
5676 will ensure this anyhow. */
5677 tmp = round_trampoline_addr (r_tramp);
5678 if (tmp != r_tramp)
5680 m_tramp = change_address (m_tramp, BLKmode, tmp);
5681 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5682 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5685 /* The FUNC argument should be the address of the nested function.
5686 Extract the actual function decl to pass to the hook. */
5687 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5688 t_func = TREE_OPERAND (t_func, 0);
5689 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5691 r_chain = expand_normal (t_chain);
5693 /* Generate insns to initialize the trampoline. */
5694 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5696 if (onstack)
5698 trampolines_created = 1;
5700 if (targetm.calls.custom_function_descriptors != 0)
5701 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5702 "trampoline generated for nested function %qD", t_func);
5705 return const0_rtx;
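/* Illustrative example, not GCC source: the classic trigger for
   trampoline creation is taking the address of a nested function that
   uses its static chain, e.g.
     int outer (int x)
     {
       int inner (int y) { return x + y; }
       int (*fp) (int) = inner;
       return fp (1);
     }
   Calling fp needs a trampoline because a plain function pointer has
   nowhere to carry the static chain.  */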
5708 static rtx
5709 expand_builtin_adjust_trampoline (tree exp)
5711 rtx tramp;
5713 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5714 return NULL_RTX;
5716 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5717 tramp = round_trampoline_addr (tramp);
5718 if (targetm.calls.trampoline_adjust_address)
5719 tramp = targetm.calls.trampoline_adjust_address (tramp);
5721 return tramp;
5724 /* Expand a call to the builtin descriptor initialization routine.
5725 A descriptor consists of a pair of pointers: one to the static
5726 chain and one to the code entry point, in that order. */
5728 static rtx
5729 expand_builtin_init_descriptor (tree exp)
5731 tree t_descr, t_func, t_chain;
5732 rtx m_descr, r_descr, r_func, r_chain;
5734 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5735 VOID_TYPE))
5736 return NULL_RTX;
5738 t_descr = CALL_EXPR_ARG (exp, 0);
5739 t_func = CALL_EXPR_ARG (exp, 1);
5740 t_chain = CALL_EXPR_ARG (exp, 2);
5742 r_descr = expand_normal (t_descr);
5743 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5744 MEM_NOTRAP_P (m_descr) = 1;
5746 r_func = expand_normal (t_func);
5747 r_chain = expand_normal (t_chain);
5749 /* Generate insns to initialize the descriptor. */
5750 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5751 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5752 POINTER_SIZE / BITS_PER_UNIT), r_func);
5754 return const0_rtx;
5757 /* Expand a call to the builtin descriptor adjustment routine. */
5759 static rtx
5760 expand_builtin_adjust_descriptor (tree exp)
5762 rtx tramp;
5764 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5765 return NULL_RTX;
5767 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5769 /* Unalign the descriptor to allow runtime identification. */
5770 tramp = plus_constant (ptr_mode, tramp,
5771 targetm.calls.custom_function_descriptors);
5773 return force_operand (tramp, NULL_RTX);
5776 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5777 function. The function first checks whether the back end provides
5778 an insn to implement signbit for the respective mode. If not, it
5779 checks whether the floating point format of the value is such that
5780 the sign bit can be extracted. If that is not the case, error out.
5781 EXP is the expression that is a call to the builtin function; if
5782 convenient, the result should be placed in TARGET. */
5783 static rtx
5784 expand_builtin_signbit (tree exp, rtx target)
5786 const struct real_format *fmt;
5787 scalar_float_mode fmode;
5788 scalar_int_mode rmode, imode;
5789 tree arg;
5790 int word, bitpos;
5791 enum insn_code icode;
5792 rtx temp;
5793 location_t loc = EXPR_LOCATION (exp);
5795 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5796 return NULL_RTX;
5798 arg = CALL_EXPR_ARG (exp, 0);
5799 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5800 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5801 fmt = REAL_MODE_FORMAT (fmode);
5803 arg = builtin_save_expr (arg);
5805 /* Expand the argument yielding a RTX expression. */
5806 temp = expand_normal (arg);
5808 /* Check if the back end provides an insn that handles signbit for the
5809 argument's mode. */
5810 icode = optab_handler (signbit_optab, fmode);
5811 if (icode != CODE_FOR_nothing)
5813 rtx_insn *last = get_last_insn ();
5814 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5815 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5816 return target;
5817 delete_insns_since (last);
5820 /* For floating point formats without a sign bit, implement signbit
5821 as "ARG < 0.0". */
5822 bitpos = fmt->signbit_ro;
5823 if (bitpos < 0)
5825 /* But we can't do this if the format supports signed zero. */
5826 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5828 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5829 build_real (TREE_TYPE (arg), dconst0));
5830 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5833 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5835 imode = int_mode_for_mode (fmode).require ();
5836 temp = gen_lowpart (imode, temp);
5838 else
5840 imode = word_mode;
5841 /* Handle targets with different FP word orders. */
5842 if (FLOAT_WORDS_BIG_ENDIAN)
5843 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5844 else
5845 word = bitpos / BITS_PER_WORD;
5846 temp = operand_subword_force (temp, word, fmode);
5847 bitpos = bitpos % BITS_PER_WORD;
5850 /* Force the intermediate word_mode (or narrower) result into a
5851 register. This avoids attempting to create paradoxical SUBREGs
5852 of floating point modes below. */
5853 temp = force_reg (imode, temp);
5855 /* If the bitpos is within the "result mode" lowpart, the operation
5856 can be implemented with a single bitwise AND. Otherwise, we need
5857 a right shift and an AND. */
5859 if (bitpos < GET_MODE_BITSIZE (rmode))
5861 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5863 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5864 temp = gen_lowpart (rmode, temp);
5865 temp = expand_binop (rmode, and_optab, temp,
5866 immed_wide_int_const (mask, rmode),
5867 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5869 else
5871 /* Perform a logical right shift to place the signbit in the least
5872 significant bit, then truncate the result to the desired mode
5873 and mask just this bit. */
5874 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5875 temp = gen_lowpart (rmode, temp);
5876 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5877 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5880 return temp;
5883 /* Expand fork or exec calls. TARGET is the desired target of the
5884 call. EXP is the call. FN is the
5885 identifier of the actual function. IGNORE is nonzero if the
5886 value is to be ignored. */
5888 static rtx
5889 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5891 tree id, decl;
5892 tree call;
5894 /* If we are not profiling, just call the function. */
5895 if (!profile_arc_flag)
5896 return NULL_RTX;
5898 /* Otherwise call the wrapper. This should be equivalent for the rest of
5899 compiler, so the code does not diverge, and the wrapper may run the
5900 code necessary for keeping the profiling sane. */
5902 switch (DECL_FUNCTION_CODE (fn))
5904 case BUILT_IN_FORK:
5905 id = get_identifier ("__gcov_fork");
5906 break;
5908 case BUILT_IN_EXECL:
5909 id = get_identifier ("__gcov_execl");
5910 break;
5912 case BUILT_IN_EXECV:
5913 id = get_identifier ("__gcov_execv");
5914 break;
5916 case BUILT_IN_EXECLP:
5917 id = get_identifier ("__gcov_execlp");
5918 break;
5920 case BUILT_IN_EXECLE:
5921 id = get_identifier ("__gcov_execle");
5922 break;
5924 case BUILT_IN_EXECVP:
5925 id = get_identifier ("__gcov_execvp");
5926 break;
5928 case BUILT_IN_EXECVE:
5929 id = get_identifier ("__gcov_execve");
5930 break;
5932 default:
5933 gcc_unreachable ();
5936 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5937 FUNCTION_DECL, id, TREE_TYPE (fn));
5938 DECL_EXTERNAL (decl) = 1;
5939 TREE_PUBLIC (decl) = 1;
5940 DECL_ARTIFICIAL (decl) = 1;
5941 TREE_NOTHROW (decl) = 1;
5942 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5943 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5944 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5945 return expand_call (call, target, ignore);
5950 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5951 the pointer in these functions is void*, the tree optimizers may remove
5952 casts. The mode computed in expand_builtin isn't reliable either, due
5953 to __sync_bool_compare_and_swap.
5955 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5956 group of builtins. This gives us log2 of the mode size. */
5958 static inline machine_mode
5959 get_builtin_sync_mode (int fcode_diff)
5961 /* The size is not negotiable, so ask not to get BLKmode in return
5962 if the target indicates that a smaller size would be better. */
5963 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
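/* Worked example, not GCC source: for __sync_fetch_and_add_4 the
   difference from the FOO_1 code is 2, so the size requested is
   BITS_PER_UNIT << 2 = 32 bits on typical targets, which resolves to
   SImode.  */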
5966 /* Expand the memory expression LOC and return the appropriate memory operand
5967 for the builtin_sync operations. */
5969 static rtx
5970 get_builtin_sync_mem (tree loc, machine_mode mode)
5972 rtx addr, mem;
5973 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5974 ? TREE_TYPE (TREE_TYPE (loc))
5975 : TREE_TYPE (loc));
5976 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5978 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5979 addr = convert_memory_address (addr_mode, addr);
5981 /* Note that we explicitly do not want any alias information for this
5982 memory, so that we kill all other live memories. Otherwise we don't
5983 satisfy the full barrier semantics of the intrinsic. */
5984 mem = gen_rtx_MEM (mode, addr);
5986 set_mem_addr_space (mem, addr_space);
5988 mem = validize_mem (mem);
5990 /* The memory must be aligned at least as strictly as the mode requires. */
5991 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5992 get_pointer_alignment (loc)));
5993 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5994 MEM_VOLATILE_P (mem) = 1;
5996 return mem;
5999 /* Make sure an argument is in the right mode.
6000 EXP is the tree argument.
6001 MODE is the mode it should be in. */
6003 static rtx
6004 expand_expr_force_mode (tree exp, machine_mode mode)
6006 rtx val;
6007 machine_mode old_mode;
6009 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6010 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6011 of CONST_INTs, where we know the old_mode only from the call argument. */
6013 old_mode = GET_MODE (val);
6014 if (old_mode == VOIDmode)
6015 old_mode = TYPE_MODE (TREE_TYPE (exp));
6016 val = convert_modes (mode, old_mode, val, 1);
6017 return val;
6021 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6022 EXP is the CALL_EXPR. CODE is the rtx code
6023 that corresponds to the arithmetic or logical operation from the name;
6024 an exception here is that NOT actually means NAND. TARGET is an optional
6025 place for us to store the results; AFTER is true if this is the
6026 fetch_and_xxx form. */
6028 static rtx
6029 expand_builtin_sync_operation (machine_mode mode, tree exp,
6030 enum rtx_code code, bool after,
6031 rtx target)
6033 rtx val, mem;
6034 location_t loc = EXPR_LOCATION (exp);
6036 if (code == NOT && warn_sync_nand)
6038 tree fndecl = get_callee_fndecl (exp);
6039 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6041 static bool warned_f_a_n, warned_n_a_f;
6043 switch (fcode)
6045 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6046 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6047 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6048 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6049 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6050 if (warned_f_a_n)
6051 break;
6053 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6054 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6055 warned_f_a_n = true;
6056 break;
6058 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6059 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6060 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6061 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6062 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6063 if (warned_n_a_f)
6064 break;
6066 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6067 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6068 warned_n_a_f = true;
6069 break;
6071 default:
6072 gcc_unreachable ();
6076 /* Expand the operands. */
6077 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6078 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6080 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6081 after);
6084 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6085 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6086 true if this is the boolean form. TARGET is a place for us to store the
6087 results; this is NOT optional if IS_BOOL is true. */
6089 static rtx
6090 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6091 bool is_bool, rtx target)
6093 rtx old_val, new_val, mem;
6094 rtx *pbool, *poval;
6096 /* Expand the operands. */
6097 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6098 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6099 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6101 pbool = poval = NULL;
6102 if (target != const0_rtx)
6104 if (is_bool)
6105 pbool = &target;
6106 else
6107 poval = &target;
6109 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6110 false, MEMMODEL_SYNC_SEQ_CST,
6111 MEMMODEL_SYNC_SEQ_CST))
6112 return NULL_RTX;
6114 return target;
6117 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6118 general form is actually an atomic exchange, and some targets only
6119 support a reduced form with the second argument being a constant 1.
6120 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6121 the results. */
6123 static rtx
6124 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6125 rtx target)
6127 rtx val, mem;
6129 /* Expand the operands. */
6130 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6131 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6133 return expand_sync_lock_test_and_set (target, mem, val);
6136 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6138 static void
6139 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6141 rtx mem;
6143 /* Expand the operands. */
6144 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6146 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6149 /* Given an integer representing an ``enum memmodel'', verify its
6150 correctness and return the memory model enum. */
6152 static enum memmodel
6153 get_memmodel (tree exp)
6155 rtx op;
6156 unsigned HOST_WIDE_INT val;
6157 location_t loc
6158 = expansion_point_location_if_in_system_header (input_location);
6160 /* If the parameter is not a constant, it's a run time value so we'll just
6161 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6162 if (TREE_CODE (exp) != INTEGER_CST)
6163 return MEMMODEL_SEQ_CST;
6165 op = expand_normal (exp);
6167 val = INTVAL (op);
6168 if (targetm.memmodel_check)
6169 val = targetm.memmodel_check (val);
6170 else if (val & ~MEMMODEL_MASK)
6172 warning_at (loc, OPT_Winvalid_memory_model,
6173 "unknown architecture specifier in memory model to builtin");
6174 return MEMMODEL_SEQ_CST;
6177 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
6178 if (memmodel_base (val) >= MEMMODEL_LAST)
6180 warning_at (loc, OPT_Winvalid_memory_model,
6181 "invalid memory model argument to builtin");
6182 return MEMMODEL_SEQ_CST;
6185 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6186 be conservative and promote consume to acquire. */
6187 if (val == MEMMODEL_CONSUME)
6188 val = MEMMODEL_ACQUIRE;
6190 return (enum memmodel) val;
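/* Illustrative note, not GCC source: the constant decoded here is the
   user-visible memory model argument, e.g.
     __atomic_load_n (&x, __ATOMIC_ACQUIRE);
   with __ATOMIC_RELAXED through __ATOMIC_SEQ_CST mapping onto the
   memmodel enumeration, and __ATOMIC_CONSUME conservatively promoted to
   acquire per the Bugzilla 59448 workaround above.  */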
6193 /* Expand the __atomic_exchange intrinsic:
6194 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6195 EXP is the CALL_EXPR.
6196 TARGET is an optional place for us to store the results. */
6198 static rtx
6199 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6201 rtx val, mem;
6202 enum memmodel model;
6204 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6206 if (!flag_inline_atomics)
6207 return NULL_RTX;
6209 /* Expand the operands. */
6210 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6211 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6213 return expand_atomic_exchange (target, mem, val, model);
6216 /* Expand the __atomic_compare_exchange intrinsic:
6217 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6218 TYPE desired, BOOL weak,
6219 enum memmodel success,
6220 enum memmodel failure)
6221 EXP is the CALL_EXPR.
6222 TARGET is an optional place for us to store the results. */
6224 static rtx
6225 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6226 rtx target)
6228 rtx expect, desired, mem, oldval;
6229 rtx_code_label *label;
6230 enum memmodel success, failure;
6231 tree weak;
6232 bool is_weak;
6233 location_t loc
6234 = expansion_point_location_if_in_system_header (input_location);
6236 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6237 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6239 if (failure > success)
6241 warning_at (loc, OPT_Winvalid_memory_model,
6242 "failure memory model cannot be stronger than success "
6243 "memory model for %<__atomic_compare_exchange%>");
6244 success = MEMMODEL_SEQ_CST;
6247 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6249 warning_at (loc, OPT_Winvalid_memory_model,
6250 "invalid failure memory model for "
6251 "%<__atomic_compare_exchange%>");
6252 failure = MEMMODEL_SEQ_CST;
6253 success = MEMMODEL_SEQ_CST;
6257 if (!flag_inline_atomics)
6258 return NULL_RTX;
6260 /* Expand the operands. */
6261 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6263 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6264 expect = convert_memory_address (Pmode, expect);
6265 expect = gen_rtx_MEM (mode, expect);
6266 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6268 weak = CALL_EXPR_ARG (exp, 3);
6269 is_weak = false;
6270 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6271 is_weak = true;
6273 if (target == const0_rtx)
6274 target = NULL;
6276 /* Lest the rtl backend create a race condition with an improper store
6277 to memory, always create a new pseudo for OLDVAL. */
6278 oldval = NULL;
6280 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6281 is_weak, success, failure))
6282 return NULL_RTX;
6284 /* Conditionally store back to EXPECT, lest we create a race condition
6285 with an improper store to memory. */
6286 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6287 the normal case where EXPECT is totally private, i.e. a register. At
6288 which point the store can be unconditional. */
6289 label = gen_label_rtx ();
6290 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6291 GET_MODE (target), 1, label);
6292 emit_move_insn (expect, oldval);
6293 emit_label (label);
6295 return target;
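/* Illustrative usage, not GCC source: a user-level call reaching this
   expander might be
     int expected = 0;
     __atomic_compare_exchange_n (&flag, &expected, 1, false,
                                  __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);
   The conditional store back to EXPECT above implements the rule that a
   failed compare-exchange updates *expected with the value actually
   observed.  */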
6298 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6299 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6300 call. The weak parameter must be dropped to match the expected parameter
6301 list, and the expected argument changed from a value to a pointer to a
6302 memory slot. */
6304 static void
6305 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6307 unsigned int z;
6308 vec<tree, va_gc> *vec;
6310 vec_alloc (vec, 5);
6311 vec->quick_push (gimple_call_arg (call, 0));
6312 tree expected = gimple_call_arg (call, 1);
6313 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6314 TREE_TYPE (expected));
6315 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6316 if (expd != x)
6317 emit_move_insn (x, expd);
6318 tree v = make_tree (TREE_TYPE (expected), x);
6319 vec->quick_push (build1 (ADDR_EXPR,
6320 build_pointer_type (TREE_TYPE (expected)), v));
6321 vec->quick_push (gimple_call_arg (call, 2));
6322 /* Skip the boolean weak parameter. */
6323 for (z = 4; z < 6; z++)
6324 vec->quick_push (gimple_call_arg (call, z));
6325 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6326 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6327 gcc_assert (bytes_log2 < 5);
6328 built_in_function fncode
6329 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6330 + bytes_log2);
6331 tree fndecl = builtin_decl_explicit (fncode);
6332 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6333 fndecl);
6334 tree exp = build_call_vec (boolean_type_node, fn, vec);
6335 tree lhs = gimple_call_lhs (call);
6336 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6337 if (lhs)
6339 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6340 if (GET_MODE (boolret) != mode)
6341 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6342 x = force_reg (mode, x);
6343 write_complex_part (target, boolret, true);
6344 write_complex_part (target, x, false);
6348 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6350 void
6351 expand_ifn_atomic_compare_exchange (gcall *call)
6353 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6354 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6355 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6356 rtx expect, desired, mem, oldval, boolret;
6357 enum memmodel success, failure;
6358 tree lhs;
6359 bool is_weak;
6360 location_t loc
6361 = expansion_point_location_if_in_system_header (gimple_location (call));
6363 success = get_memmodel (gimple_call_arg (call, 4));
6364 failure = get_memmodel (gimple_call_arg (call, 5));
6366 if (failure > success)
6368 warning_at (loc, OPT_Winvalid_memory_model,
6369 "failure memory model cannot be stronger than success "
6370 "memory model for %<__atomic_compare_exchange%>");
6371 success = MEMMODEL_SEQ_CST;
6374 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6376 warning_at (loc, OPT_Winvalid_memory_model,
6377 "invalid failure memory model for "
6378 "%<__atomic_compare_exchange%>");
6379 failure = MEMMODEL_SEQ_CST;
6380 success = MEMMODEL_SEQ_CST;
6383 if (!flag_inline_atomics)
6385 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6386 return;
6389 /* Expand the operands. */
6390 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6392 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6393 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6395 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6397 boolret = NULL;
6398 oldval = NULL;
6400 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6401 is_weak, success, failure))
6403 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6404 return;
6407 lhs = gimple_call_lhs (call);
6408 if (lhs)
6410 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6411 if (GET_MODE (boolret) != mode)
6412 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6413 write_complex_part (target, boolret, true);
6414 write_complex_part (target, oldval, false);
6418 /* Expand the __atomic_load intrinsic:
6419 TYPE __atomic_load (TYPE *object, enum memmodel)
6420 EXP is the CALL_EXPR.
6421 TARGET is an optional place for us to store the results. */
6423 static rtx
6424 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6426 rtx mem;
6427 enum memmodel model;
6429 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6430 if (is_mm_release (model) || is_mm_acq_rel (model))
6432 location_t loc
6433 = expansion_point_location_if_in_system_header (input_location);
6434 warning_at (loc, OPT_Winvalid_memory_model,
6435 "invalid memory model for %<__atomic_load%>");
6436 model = MEMMODEL_SEQ_CST;
6439 if (!flag_inline_atomics)
6440 return NULL_RTX;
6442 /* Expand the operand. */
6443 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6445 return expand_atomic_load (target, mem, model);
6449 /* Expand the __atomic_store intrinsic:
6450 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6451 EXP is the CALL_EXPR.
6452 TARGET is an optional place for us to store the results. */
6454 static rtx
6455 expand_builtin_atomic_store (machine_mode mode, tree exp)
6457 rtx mem, val;
6458 enum memmodel model;
6460 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6461 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6462 || is_mm_release (model)))
6464 location_t loc
6465 = expansion_point_location_if_in_system_header (input_location);
6466 warning_at (loc, OPT_Winvalid_memory_model,
6467 "invalid memory model for %<__atomic_store%>");
6468 model = MEMMODEL_SEQ_CST;
6471 if (!flag_inline_atomics)
6472 return NULL_RTX;
6474 /* Expand the operands. */
6475 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6476 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6478 return expand_atomic_store (mem, val, model, false);
6481 /* Expand the __atomic_fetch_XXX intrinsic:
6482 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6483 EXP is the CALL_EXPR.
6484 TARGET is an optional place for us to store the results.
6485 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6486 FETCH_AFTER is true if returning the result of the operation.
6487 FETCH_AFTER is false if returning the value before the operation.
6488 IGNORE is true if the result is not used.
6489 EXT_CALL is the correct builtin for an external call if this cannot be
6490 resolved to an instruction sequence. */
6492 static rtx
6493 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6494 enum rtx_code code, bool fetch_after,
6495 bool ignore, enum built_in_function ext_call)
6497 rtx val, mem, ret;
6498 enum memmodel model;
6499 tree fndecl;
6500 tree addr;
6502 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6504 /* Expand the operands. */
6505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6506 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6508 /* Only try generating instructions if inlining is turned on. */
6509 if (flag_inline_atomics)
6511 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6512 if (ret)
6513 return ret;
6516 /* Return if a different routine isn't needed for the library call. */
6517 if (ext_call == BUILT_IN_NONE)
6518 return NULL_RTX;
6520 /* Change the call to the specified function. */
6521 fndecl = get_callee_fndecl (exp);
6522 addr = CALL_EXPR_FN (exp);
6523 STRIP_NOPS (addr);
6525 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6526 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6528 /* If we will emit code after the call, the call cannot be a tail call.
6529 If it is emitted as a tail call, a barrier is emitted after it, and
6530 then all trailing code is removed. */
6531 if (!ignore)
6532 CALL_EXPR_TAILCALL (exp) = 0;
6534 /* Expand the call here so we can emit trailing code. */
6535 ret = expand_call (exp, target, ignore);
6537 /* Replace the original function just in case it matters. */
6538 TREE_OPERAND (addr, 0) = fndecl;
6540 /* Then issue the arithmetic correction to return the right result. */
6541 if (!ignore)
6543 if (code == NOT)
6545 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6546 OPTAB_LIB_WIDEN);
6547 ret = expand_simple_unop (mode, NOT, ret, target, true);
6549 else
6550 ret = expand_simple_binop (mode, code, ret, val, target, true,
6551 OPTAB_LIB_WIDEN);
6553 return ret;
6556 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6558 void
6559 expand_ifn_atomic_bit_test_and (gcall *call)
6561 tree ptr = gimple_call_arg (call, 0);
6562 tree bit = gimple_call_arg (call, 1);
6563 tree flag = gimple_call_arg (call, 2);
6564 tree lhs = gimple_call_lhs (call);
6565 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6566 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6567 enum rtx_code code;
6568 optab optab;
6569 struct expand_operand ops[5];
6571 gcc_assert (flag_inline_atomics);
6573 if (gimple_call_num_args (call) == 4)
6574 model = get_memmodel (gimple_call_arg (call, 3));
6576 rtx mem = get_builtin_sync_mem (ptr, mode);
6577 rtx val = expand_expr_force_mode (bit, mode);
6579 switch (gimple_call_internal_fn (call))
6581 case IFN_ATOMIC_BIT_TEST_AND_SET:
6582 code = IOR;
6583 optab = atomic_bit_test_and_set_optab;
6584 break;
6585 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6586 code = XOR;
6587 optab = atomic_bit_test_and_complement_optab;
6588 break;
6589 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6590 code = AND;
6591 optab = atomic_bit_test_and_reset_optab;
6592 break;
6593 default:
6594 gcc_unreachable ();
6597 if (lhs == NULL_TREE)
6599 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6600 val, NULL_RTX, true, OPTAB_DIRECT);
6601 if (code == AND)
6602 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6603 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6604 return;
6607 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6608 enum insn_code icode = direct_optab_handler (optab, mode);
6609 gcc_assert (icode != CODE_FOR_nothing);
6610 create_output_operand (&ops[0], target, mode);
6611 create_fixed_operand (&ops[1], mem);
6612 create_convert_operand_to (&ops[2], val, mode, true);
6613 create_integer_operand (&ops[3], model);
6614 create_integer_operand (&ops[4], integer_onep (flag));
6615 if (maybe_expand_insn (icode, 5, ops))
6616 return;
6618 rtx bitval = val;
6619 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6620 val, NULL_RTX, true, OPTAB_DIRECT);
6621 rtx maskval = val;
6622 if (code == AND)
6623 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6624 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6625 code, model, false);
6626 if (integer_onep (flag))
6628 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6629 NULL_RTX, true, OPTAB_DIRECT);
6630 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6631 true, OPTAB_DIRECT);
6633 else
6634 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6635 OPTAB_DIRECT);
6636 if (result != target)
6637 emit_move_insn (target, result);
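/* Illustrative note, not GCC source: these internal functions are not
   user-callable; earlier GIMPLE passes may rewrite code of the form
     bool was_set
       = __atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST)
         & (1 << bit);
   into IFN_ATOMIC_BIT_TEST_AND_SET so that targets with a native atomic
   bit-test-and-set instruction can use it directly.  */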
6640 /* Expand an atomic clear operation.
6641 void _atomic_clear (BOOL *obj, enum memmodel)
6642 EXP is the call expression. */
6644 static rtx
6645 expand_builtin_atomic_clear (tree exp)
6647 machine_mode mode;
6648 rtx mem, ret;
6649 enum memmodel model;
6651 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6652 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6653 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6655 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6657 location_t loc
6658 = expansion_point_location_if_in_system_header (input_location);
6659 warning_at (loc, OPT_Winvalid_memory_model,
6660 "invalid memory model for %<__atomic_store%>");
6661 model = MEMMODEL_SEQ_CST;
6664 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6665 Failing that, a store is issued by __atomic_store. The only way this can
6666 fail is if the bool type is larger than a word size. Unlikely, but
6667 handle it anyway for completeness. Assume a single threaded model since
6668 there is no atomic support in this case, and no barriers are required. */
6669 ret = expand_atomic_store (mem, const0_rtx, model, true);
6670 if (!ret)
6671 emit_move_insn (mem, const0_rtx);
6672 return const0_rtx;
6675 /* Expand an atomic test_and_set operation.
6676 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6677 EXP is the call expression. */
6679 static rtx
6680 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6682 rtx mem;
6683 enum memmodel model;
6684 machine_mode mode;
6686 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6687 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6688 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6690 return expand_atomic_test_and_set (target, mem, model);
6694 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6695 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6697 static tree
6698 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6700 int size;
6701 machine_mode mode;
6702 unsigned int mode_align, type_align;
6704 if (TREE_CODE (arg0) != INTEGER_CST)
6705 return NULL_TREE;
6707 /* We need a corresponding integer mode for the access to be lock-free. */
6708 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6709 if (!int_mode_for_size (size, 0).exists (&mode))
6710 return boolean_false_node;
6712 mode_align = GET_MODE_ALIGNMENT (mode);
6714 if (TREE_CODE (arg1) == INTEGER_CST)
6716 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6718 /* Either this argument is null, or it's a fake pointer encoding
6719 the alignment of the object. */
6720 val = least_bit_hwi (val);
6721 val *= BITS_PER_UNIT;
6723 if (val == 0 || mode_align < val)
6724 type_align = mode_align;
6725 else
6726 type_align = val;
6728 else
6730 tree ttype = TREE_TYPE (arg1);
6732 /* This function is usually invoked and folded immediately by the front
6733 end before anything else has a chance to look at it. The pointer
6734 parameter at this point is usually cast to a void *, so check for that
6735 and look past the cast. */
6736 if (CONVERT_EXPR_P (arg1)
6737 && POINTER_TYPE_P (ttype)
6738 && VOID_TYPE_P (TREE_TYPE (ttype))
6739 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6740 arg1 = TREE_OPERAND (arg1, 0);
6742 ttype = TREE_TYPE (arg1);
6743 gcc_assert (POINTER_TYPE_P (ttype));
6745 /* Get the underlying type of the object. */
6746 ttype = TREE_TYPE (ttype);
6747 type_align = TYPE_ALIGN (ttype);
6750 /* If the object has smaller alignment, the lock free routines cannot
6751 be used. */
6752 if (type_align < mode_align)
6753 return boolean_false_node;
6755 /* Check if a compare_and_swap pattern exists for the mode which represents
6756 the required size. The pattern is not allowed to fail, so the existence
6757 of the pattern indicates support is present. Also require that an
6758 atomic load exists for the required size. */
6759 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6760 return boolean_true_node;
6761 else
6762 return boolean_false_node;
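/* Illustrative example, not GCC source: on mainstream targets a query
   such as
     __atomic_always_lock_free (sizeof (int), 0)
   folds to true here, because an integer mode of that size exists with
   both a compare-and-swap pattern and an atomic load; a pointer to an
   under-aligned object instead makes it fold to false.  */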
6765 /* Return true if the parameters to call EXP represent an object which will
6766 always generate lock free instructions. The first argument represents the
6767 size of the object, and the second parameter is a pointer to the object
6768 itself. If NULL is passed for the object, then the result is based on
6769 typical alignment for an object of the specified size. Otherwise return
6770 false. */
6772 static rtx
6773 expand_builtin_atomic_always_lock_free (tree exp)
6775 tree size;
6776 tree arg0 = CALL_EXPR_ARG (exp, 0);
6777 tree arg1 = CALL_EXPR_ARG (exp, 1);
6779 if (TREE_CODE (arg0) != INTEGER_CST)
6781 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6782 return const0_rtx;
6785 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6786 if (size == boolean_true_node)
6787 return const1_rtx;
6788 return const0_rtx;
6791 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6792 is lock free on this architecture. */
6794 static tree
6795 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6797 if (!flag_inline_atomics)
6798 return NULL_TREE;
6800 /* If it isn't always lock free, don't generate a result. */
6801 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6802 return boolean_true_node;
6804 return NULL_TREE;
6807 /* Return true if the parameters to call EXP represent an object which will
6808 always generate lock free instructions. The first argument represents the
6809 size of the object, and the second parameter is a pointer to the object
6810 itself. If NULL is passed for the object, then the result is based on
6811 typical alignment for an object of the specified size. Otherwise return
6812 NULL. */
6814 static rtx
6815 expand_builtin_atomic_is_lock_free (tree exp)
6817 tree size;
6818 tree arg0 = CALL_EXPR_ARG (exp, 0);
6819 tree arg1 = CALL_EXPR_ARG (exp, 1);
6821 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6823 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6824 return NULL_RTX;
6827 if (!flag_inline_atomics)
6828 return NULL_RTX;
6830 /* If the value is known at compile time, return the RTX for it. */
6831 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6832 if (size == boolean_true_node)
6833 return const1_rtx;
6835 return NULL_RTX;
6838 /* Expand the __atomic_thread_fence intrinsic:
6839 void __atomic_thread_fence (enum memmodel)
6840 EXP is the CALL_EXPR. */
6842 static void
6843 expand_builtin_atomic_thread_fence (tree exp)
6845 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6846 expand_mem_thread_fence (model);
6849 /* Expand the __atomic_signal_fence intrinsic:
6850 void __atomic_signal_fence (enum memmodel)
6851 EXP is the CALL_EXPR. */
6853 static void
6854 expand_builtin_atomic_signal_fence (tree exp)
6856 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6857 expand_mem_signal_fence (model);
6860 /* Expand the __sync_synchronize intrinsic. */
6862 static void
6863 expand_builtin_sync_synchronize (void)
6865 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6868 static rtx
6869 expand_builtin_thread_pointer (tree exp, rtx target)
6871 enum insn_code icode;
6872 if (!validate_arglist (exp, VOID_TYPE))
6873 return const0_rtx;
6874 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6875 if (icode != CODE_FOR_nothing)
6877 struct expand_operand op;
6878 /* If the target is not suitable then create a new target. */
6879 if (target == NULL_RTX
6880 || !REG_P (target)
6881 || GET_MODE (target) != Pmode)
6882 target = gen_reg_rtx (Pmode);
6883 create_output_operand (&op, target, Pmode);
6884 expand_insn (icode, 1, &op);
6885 return target;
6887 error ("%<__builtin_thread_pointer%> is not supported on this target");
6888 return const0_rtx;
6891 static void
6892 expand_builtin_set_thread_pointer (tree exp)
6894 enum insn_code icode;
6895 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6896 return;
6897 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6898 if (icode != CODE_FOR_nothing)
6900 struct expand_operand op;
6901 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6902 Pmode, EXPAND_NORMAL);
6903 create_input_operand (&op, val, Pmode);
6904 expand_insn (icode, 1, &op);
6905 return;
6907 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6911 /* Emit code to restore the current value of the stack. */
6913 static void
6914 expand_stack_restore (tree var)
6916 rtx_insn *prev;
6917 rtx sa = expand_normal (var);
6919 sa = convert_memory_address (Pmode, sa);
6921 prev = get_last_insn ();
6922 emit_stack_restore (SAVE_BLOCK, sa);
6924 record_new_stack_level ();
6926 fixup_args_size_notes (prev, get_last_insn (), 0);
6929 /* Emit code to save the current value of the stack. */
6931 static rtx
6932 expand_stack_save (void)
6934 rtx ret = NULL_RTX;
6936 emit_stack_save (SAVE_BLOCK, &ret);
6937 return ret;
6940 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6942 static rtx
6943 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6945 const char *name;
6946 rtx fallback_retval;
6947 rtx_insn *(*gen_fn) (rtx, rtx);
6948 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6950 case BUILT_IN_GOACC_PARLEVEL_ID:
6951 name = "__builtin_goacc_parlevel_id";
6952 fallback_retval = const0_rtx;
6953 gen_fn = targetm.gen_oacc_dim_pos;
6954 break;
6955 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6956 name = "__builtin_goacc_parlevel_size";
6957 fallback_retval = const1_rtx;
6958 gen_fn = targetm.gen_oacc_dim_size;
6959 break;
6960 default:
6961 gcc_unreachable ();
6964 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6966 error ("%qs only supported in OpenACC code", name);
6967 return const0_rtx;
6970 tree arg = CALL_EXPR_ARG (exp, 0);
6971 if (TREE_CODE (arg) != INTEGER_CST)
6973 error ("non-constant argument 0 to %qs", name);
6974 return const0_rtx;
6977 int dim = TREE_INT_CST_LOW (arg);
6978 switch (dim)
6980 case GOMP_DIM_GANG:
6981 case GOMP_DIM_WORKER:
6982 case GOMP_DIM_VECTOR:
6983 break;
6984 default:
6985 error ("illegal argument 0 to %qs", name);
6986 return const0_rtx;
6989 if (ignore)
6990 return target;
6992 if (target == NULL_RTX)
6993 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6995 if (!targetm.have_oacc_dim_size ())
6997 emit_move_insn (target, fallback_retval);
6998 return target;
7001 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7002 emit_insn (gen_fn (reg, GEN_INT (dim)));
7003 if (reg != target)
7004 emit_move_insn (target, reg);
7006 return target;
7009 /* Expand a string compare operation using a sequence of char comparisons
7010 to avoid the call overhead, with the result going to TARGET if
7011 that's convenient.
7013 VAR_STR is the variable string source;
7014 CONST_STR is the constant string source;
7015 LENGTH is the number of chars to compare;
7016 CONST_STR_N indicates which source string is the constant string;
7017 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7019 It is expanded to (assuming const_str_n is 2, i.e., arg2 is the constant string):
7021 target = (int) (unsigned char) var_str[0]
7022 - (int) (unsigned char) const_str[0];
7023 if (target != 0)
7024 goto ne_label;
7026 target = (int) (unsigned char) var_str[length - 2]
7027 - (int) (unsigned char) const_str[length - 2];
7028 if (target != 0)
7029 goto ne_label;
7030 target = (int) (unsigned char) var_str[length - 1]
7031 - (int) (unsigned char) const_str[length - 1];
7032 ne_label:
7035 static rtx
7036 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7037 unsigned HOST_WIDE_INT length,
7038 int const_str_n, machine_mode mode)
7040 HOST_WIDE_INT offset = 0;
7041 rtx var_rtx_array
7042 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7043 rtx var_rtx = NULL_RTX;
7044 rtx const_rtx = NULL_RTX;
7045 rtx result = target ? target : gen_reg_rtx (mode);
7046 rtx_code_label *ne_label = gen_label_rtx ();
7047 tree unit_type_node = unsigned_char_type_node;
7048 scalar_int_mode unit_mode
7049 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7051 start_sequence ();
7053 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7055 var_rtx
7056 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7057 const_rtx = c_readstr (const_str + offset, unit_mode);
7058 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7059 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7061 op0 = convert_modes (mode, unit_mode, op0, 1);
7062 op1 = convert_modes (mode, unit_mode, op1, 1);
7063 result = expand_simple_binop (mode, MINUS, op0, op1,
7064 result, 1, OPTAB_WIDEN);
7065 if (i < length - 1)
7066 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7067 mode, true, ne_label);
7068 offset += GET_MODE_SIZE (unit_mode);
7071 emit_label (ne_label);
7072 rtx_insn *insns = get_insns ();
7073 end_sequence ();
7074 emit_insn (insns);
7076 return result;
7079 /* Inline-expand a call to str(n)cmp or memcmp, with the result going to
7080 TARGET if that's convenient.
7081 If the call cannot be inlined, return NULL_RTX. */
7082 static rtx
7083 inline_expand_builtin_string_cmp (tree exp, rtx target)
7085 tree fndecl = get_callee_fndecl (exp);
7086 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7087 unsigned HOST_WIDE_INT length = 0;
7088 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7090 /* Do NOT apply this inlining expansion when optimizing for size or
7091 when the optimization level is below 2. */
7092 if (optimize < 2 || optimize_insn_for_size_p ())
7093 return NULL_RTX;
7095 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7096 || fcode == BUILT_IN_STRNCMP
7097 || fcode == BUILT_IN_MEMCMP);
7099 /* On a target where the type of the call (int) has the same or narrower
7100 precision than unsigned char, give up the inlining expansion. */
7101 if (TYPE_PRECISION (unsigned_char_type_node)
7102 >= TYPE_PRECISION (TREE_TYPE (exp)))
7103 return NULL_RTX;
7105 tree arg1 = CALL_EXPR_ARG (exp, 0);
7106 tree arg2 = CALL_EXPR_ARG (exp, 1);
7107 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7109 unsigned HOST_WIDE_INT len1 = 0;
7110 unsigned HOST_WIDE_INT len2 = 0;
7111 unsigned HOST_WIDE_INT len3 = 0;
7113 const char *src_str1 = c_getstr (arg1, &len1);
7114 const char *src_str2 = c_getstr (arg2, &len2);
7116 /* If neither string is a constant string, the call does not qualify. */
7117 if (!src_str1 && !src_str2)
7118 return NULL_RTX;
7120 /* For strncmp, if the length is not a constant, the call does not qualify. */
7121 if (is_ncmp)
7123 if (!tree_fits_uhwi_p (len3_tree))
7124 return NULL_RTX;
7125 else
7126 len3 = tree_to_uhwi (len3_tree);
7129 if (src_str1 != NULL)
7130 len1 = strnlen (src_str1, len1) + 1;
7132 if (src_str2 != NULL)
7133 len2 = strnlen (src_str2, len2) + 1;
7135 int const_str_n = 0;
7136 if (!len1)
7137 const_str_n = 2;
7138 else if (!len2)
7139 const_str_n = 1;
7140 else if (len2 > len1)
7141 const_str_n = 1;
7142 else
7143 const_str_n = 2;
7145 gcc_checking_assert (const_str_n > 0);
7146 length = (const_str_n == 1) ? len1 : len2;
7148 if (is_ncmp && len3 < length)
7149 length = len3;
7151 /* If the length of the comparison is larger than the threshold,
7152 do nothing. */
7153 if (length > (unsigned HOST_WIDE_INT)
7154 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7155 return NULL_RTX;
7157 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7159 /* Now, start the inline expansion of the call. */
7160 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7161 (const_str_n == 1) ? src_str1 : src_str2, length,
7162 const_str_n, mode);
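/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   which calls the qualification checks above accept at -O2.  The
   --param builtin-string-cmp-inline-length threshold defaults to 3,
   so "ab" (two chars plus the terminating NUL) is the longest string
   inlined by default.  */
extern const char *s;
int f1 (void) { return __builtin_strcmp (s, "ab"); }      /* inlined */
int f2 (void) { return __builtin_strcmp (s, "toolong"); } /* too long:
                                                             library call */
int f3 (const char *a, const char *b)
{ return __builtin_strcmp (a, b); }   /* no constant string: library call */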
7165 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7166 represents the size of the first argument to that call, or VOIDmode
7167 if the argument is a pointer. IGNORE will be true if the result
7168 isn't used. */
7169 static rtx
7170 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7171 bool ignore)
7173 rtx val, failsafe;
7174 unsigned nargs = call_expr_nargs (exp);
7176 tree arg0 = CALL_EXPR_ARG (exp, 0);
7178 if (mode == VOIDmode)
7180 mode = TYPE_MODE (TREE_TYPE (arg0));
7181 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7184 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7186 /* An optional second argument can be used as a failsafe value on
7187 some machines. If it isn't present, then the failsafe value is
7188 assumed to be 0. */
7189 if (nargs > 1)
7191 tree arg1 = CALL_EXPR_ARG (exp, 1);
7192 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7194 else
7195 failsafe = const0_rtx;
7197 /* If the result isn't used, the behavior is undefined. It would be
7198 nice to emit a warning here, but path splitting means this might
7199 happen with legitimate code. So simply drop the builtin
7200 expansion in that case; we've handled any side-effects above. */
7201 if (ignore)
7202 return const0_rtx;
7204 /* If we don't have a suitable target, create one to hold the result. */
7205 if (target == NULL || GET_MODE (target) != mode)
7206 target = gen_reg_rtx (mode);
7208 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7209 val = convert_modes (mode, VOIDmode, val, false);
7211 return targetm.speculation_safe_value (mode, target, val, failsafe);
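/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   the source-level contract of the expander above.  Under correct
   execution SAFE equals IDX; on a misspeculated path past the bounds
   check, targets with speculation barriers force SAFE to the failsafe
   value (0 here, since no second argument is given).  */
int
load_guarded (int *base, unsigned idx, unsigned bound)
{
  if (idx < bound)
    {
      unsigned safe = __builtin_speculation_safe_value (idx);
      return base[safe];
    }
  return 0;
}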
7214 /* Expand an expression EXP that calls a built-in function,
7215 with result going to TARGET if that's convenient
7216 (and in mode MODE if that's convenient).
7217 SUBTARGET may be used as the target for computing one of EXP's operands.
7218 IGNORE is nonzero if the value is to be ignored. */
7221 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7222 int ignore)
7224 tree fndecl = get_callee_fndecl (exp);
7225 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7226 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7227 int flags;
7229 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7230 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7232 /* When ASan is enabled, we don't want to expand some memory/string
7233 builtins and rely on libsanitizer's hooks. This allows us to avoid
7234 redundant checks and be sure that a possible overflow will be detected
7235 by ASan. */
7237 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7238 return expand_call (exp, target, ignore);
7240 /* When not optimizing, generate calls to library functions for a certain
7241 set of builtins. */
7242 if (!optimize
7243 && !called_as_built_in (fndecl)
7244 && fcode != BUILT_IN_FORK
7245 && fcode != BUILT_IN_EXECL
7246 && fcode != BUILT_IN_EXECV
7247 && fcode != BUILT_IN_EXECLP
7248 && fcode != BUILT_IN_EXECLE
7249 && fcode != BUILT_IN_EXECVP
7250 && fcode != BUILT_IN_EXECVE
7251 && !ALLOCA_FUNCTION_CODE_P (fcode)
7252 && fcode != BUILT_IN_FREE)
7253 return expand_call (exp, target, ignore);
7255 /* The built-in function expanders test for target == const0_rtx
7256 to determine whether the function's result will be ignored. */
7257 if (ignore)
7258 target = const0_rtx;
7260 /* If the result of a pure or const built-in function is ignored, and
7261 none of its arguments are volatile, we can avoid expanding the
7262 built-in call and just evaluate the arguments for side-effects. */
7263 if (target == const0_rtx
7264 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7265 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7267 bool volatilep = false;
7268 tree arg;
7269 call_expr_arg_iterator iter;
7271 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7272 if (TREE_THIS_VOLATILE (arg))
7274 volatilep = true;
7275 break;
7278 if (! volatilep)
7280 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7281 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7282 return const0_rtx;
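	  /* Conceptually, for a pure builtin whose result is unused, e.g.

	       extern char *q;
	       (void) __builtin_strlen (q++);

	     the call itself is dropped while the increment of Q is still
	     evaluated for its side effect.  */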
7286 switch (fcode)
7288 CASE_FLT_FN (BUILT_IN_FABS):
7289 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7290 case BUILT_IN_FABSD32:
7291 case BUILT_IN_FABSD64:
7292 case BUILT_IN_FABSD128:
7293 target = expand_builtin_fabs (exp, target, subtarget);
7294 if (target)
7295 return target;
7296 break;
7298 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7299 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7300 target = expand_builtin_copysign (exp, target, subtarget);
7301 if (target)
7302 return target;
7303 break;
7305 /* Just do a normal library call if we were unable to fold
7306 the values. */
7307 CASE_FLT_FN (BUILT_IN_CABS):
7308 break;
7310 CASE_FLT_FN (BUILT_IN_FMA):
7311 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7312 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7313 if (target)
7314 return target;
7315 break;
7317 CASE_FLT_FN (BUILT_IN_ILOGB):
7318 if (! flag_unsafe_math_optimizations)
7319 break;
7320 gcc_fallthrough ();
7321 CASE_FLT_FN (BUILT_IN_ISINF):
7322 CASE_FLT_FN (BUILT_IN_FINITE):
7323 case BUILT_IN_ISFINITE:
7324 case BUILT_IN_ISNORMAL:
7325 target = expand_builtin_interclass_mathfn (exp, target);
7326 if (target)
7327 return target;
7328 break;
7330 CASE_FLT_FN (BUILT_IN_ICEIL):
7331 CASE_FLT_FN (BUILT_IN_LCEIL):
7332 CASE_FLT_FN (BUILT_IN_LLCEIL):
7333 CASE_FLT_FN (BUILT_IN_LFLOOR):
7334 CASE_FLT_FN (BUILT_IN_IFLOOR):
7335 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7336 target = expand_builtin_int_roundingfn (exp, target);
7337 if (target)
7338 return target;
7339 break;
7341 CASE_FLT_FN (BUILT_IN_IRINT):
7342 CASE_FLT_FN (BUILT_IN_LRINT):
7343 CASE_FLT_FN (BUILT_IN_LLRINT):
7344 CASE_FLT_FN (BUILT_IN_IROUND):
7345 CASE_FLT_FN (BUILT_IN_LROUND):
7346 CASE_FLT_FN (BUILT_IN_LLROUND):
7347 target = expand_builtin_int_roundingfn_2 (exp, target);
7348 if (target)
7349 return target;
7350 break;
7352 CASE_FLT_FN (BUILT_IN_POWI):
7353 target = expand_builtin_powi (exp, target);
7354 if (target)
7355 return target;
7356 break;
7358 CASE_FLT_FN (BUILT_IN_CEXPI):
7359 target = expand_builtin_cexpi (exp, target);
7360 gcc_assert (target);
7361 return target;
7363 CASE_FLT_FN (BUILT_IN_SIN):
7364 CASE_FLT_FN (BUILT_IN_COS):
7365 if (! flag_unsafe_math_optimizations)
7366 break;
7367 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7368 if (target)
7369 return target;
7370 break;
7372 CASE_FLT_FN (BUILT_IN_SINCOS):
7373 if (! flag_unsafe_math_optimizations)
7374 break;
7375 target = expand_builtin_sincos (exp);
7376 if (target)
7377 return target;
7378 break;
7380 case BUILT_IN_APPLY_ARGS:
7381 return expand_builtin_apply_args ();
7383 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7384 FUNCTION with a copy of the parameters described by
7385 ARGUMENTS, and ARGSIZE. It returns a block of memory
7386 allocated on the stack into which are stored all the registers
7387 that might possibly be used for returning the result of a
7388 function. ARGUMENTS is the value returned by
7389 __builtin_apply_args. ARGSIZE is the number of bytes of
7390 arguments that must be copied. ??? How should this value be
7391 computed? We'll also need a safe worst case value for varargs
7392 functions. */
7393 case BUILT_IN_APPLY:
7394 if (!validate_arglist (exp, POINTER_TYPE,
7395 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7396 && !validate_arglist (exp, REFERENCE_TYPE,
7397 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7398 return const0_rtx;
7399 else
7401 rtx ops[3];
7403 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7404 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7405 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7407 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7410 /* __builtin_return (RESULT) causes the function to return the
7411 value described by RESULT. RESULT is address of the block of
7412 memory returned by __builtin_apply. */
7413 case BUILT_IN_RETURN:
7414 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7415 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7416 return const0_rtx;
7418 case BUILT_IN_SAVEREGS:
7419 return expand_builtin_saveregs ();
7421 case BUILT_IN_VA_ARG_PACK:
7422 /* All valid uses of __builtin_va_arg_pack () are removed during
7423 inlining. */
7424 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7425 return const0_rtx;
7427 case BUILT_IN_VA_ARG_PACK_LEN:
7428 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7429 inlining. */
7430 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7431 return const0_rtx;
7433 /* Return the address of the first anonymous stack arg. */
7434 case BUILT_IN_NEXT_ARG:
7435 if (fold_builtin_next_arg (exp, false))
7436 return const0_rtx;
7437 return expand_builtin_next_arg ();
7439 case BUILT_IN_CLEAR_CACHE:
7440 target = expand_builtin___clear_cache (exp);
7441 if (target)
7442 return target;
7443 break;
7445 case BUILT_IN_CLASSIFY_TYPE:
7446 return expand_builtin_classify_type (exp);
7448 case BUILT_IN_CONSTANT_P:
7449 return const0_rtx;
7451 case BUILT_IN_FRAME_ADDRESS:
7452 case BUILT_IN_RETURN_ADDRESS:
7453 return expand_builtin_frame_address (fndecl, exp);
7455 /* Return the address of the area where the structure is returned,
7456 or 0 otherwise. */
7457 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7458 if (call_expr_nargs (exp) != 0
7459 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7460 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7461 return const0_rtx;
7462 else
7463 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7465 CASE_BUILT_IN_ALLOCA:
7466 target = expand_builtin_alloca (exp);
7467 if (target)
7468 return target;
7469 break;
7471 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7472 return expand_asan_emit_allocas_unpoison (exp);
7474 case BUILT_IN_STACK_SAVE:
7475 return expand_stack_save ();
7477 case BUILT_IN_STACK_RESTORE:
7478 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7479 return const0_rtx;
7481 case BUILT_IN_BSWAP16:
7482 case BUILT_IN_BSWAP32:
7483 case BUILT_IN_BSWAP64:
7484 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7485 if (target)
7486 return target;
7487 break;
7489 CASE_INT_FN (BUILT_IN_FFS):
7490 target = expand_builtin_unop (target_mode, exp, target,
7491 subtarget, ffs_optab);
7492 if (target)
7493 return target;
7494 break;
7496 CASE_INT_FN (BUILT_IN_CLZ):
7497 target = expand_builtin_unop (target_mode, exp, target,
7498 subtarget, clz_optab);
7499 if (target)
7500 return target;
7501 break;
7503 CASE_INT_FN (BUILT_IN_CTZ):
7504 target = expand_builtin_unop (target_mode, exp, target,
7505 subtarget, ctz_optab);
7506 if (target)
7507 return target;
7508 break;
7510 CASE_INT_FN (BUILT_IN_CLRSB):
7511 target = expand_builtin_unop (target_mode, exp, target,
7512 subtarget, clrsb_optab);
7513 if (target)
7514 return target;
7515 break;
7517 CASE_INT_FN (BUILT_IN_POPCOUNT):
7518 target = expand_builtin_unop (target_mode, exp, target,
7519 subtarget, popcount_optab);
7520 if (target)
7521 return target;
7522 break;
7524 CASE_INT_FN (BUILT_IN_PARITY):
7525 target = expand_builtin_unop (target_mode, exp, target,
7526 subtarget, parity_optab);
7527 if (target)
7528 return target;
7529 break;
7531 case BUILT_IN_STRLEN:
7532 target = expand_builtin_strlen (exp, target, target_mode);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_STRNLEN:
7538 target = expand_builtin_strnlen (exp, target, target_mode);
7539 if (target)
7540 return target;
7541 break;
7543 case BUILT_IN_STRCAT:
7544 target = expand_builtin_strcat (exp, target);
7545 if (target)
7546 return target;
7547 break;
7549 case BUILT_IN_STRCPY:
7550 target = expand_builtin_strcpy (exp, target);
7551 if (target)
7552 return target;
7553 break;
7555 case BUILT_IN_STRNCAT:
7556 target = expand_builtin_strncat (exp, target);
7557 if (target)
7558 return target;
7559 break;
7561 case BUILT_IN_STRNCPY:
7562 target = expand_builtin_strncpy (exp, target);
7563 if (target)
7564 return target;
7565 break;
7567 case BUILT_IN_STPCPY:
7568 target = expand_builtin_stpcpy (exp, target, mode);
7569 if (target)
7570 return target;
7571 break;
7573 case BUILT_IN_STPNCPY:
7574 target = expand_builtin_stpncpy (exp, target);
7575 if (target)
7576 return target;
7577 break;
7579 case BUILT_IN_MEMCHR:
7580 target = expand_builtin_memchr (exp, target);
7581 if (target)
7582 return target;
7583 break;
7585 case BUILT_IN_MEMCPY:
7586 target = expand_builtin_memcpy (exp, target);
7587 if (target)
7588 return target;
7589 break;
7591 case BUILT_IN_MEMMOVE:
7592 target = expand_builtin_memmove (exp, target);
7593 if (target)
7594 return target;
7595 break;
7597 case BUILT_IN_MEMPCPY:
7598 target = expand_builtin_mempcpy (exp, target);
7599 if (target)
7600 return target;
7601 break;
7603 case BUILT_IN_MEMSET:
7604 target = expand_builtin_memset (exp, target, mode);
7605 if (target)
7606 return target;
7607 break;
7609 case BUILT_IN_BZERO:
7610 target = expand_builtin_bzero (exp);
7611 if (target)
7612 return target;
7613 break;
7615 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7616 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7617 when changing it to a strcmp call. */
7618 case BUILT_IN_STRCMP_EQ:
7619 target = expand_builtin_memcmp (exp, target, true);
7620 if (target)
7621 return target;
7623 /* Change this call back to a BUILT_IN_STRCMP. */
7624 TREE_OPERAND (exp, 1)
7625 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7627 /* Delete the last parameter. */
7628 unsigned int i;
7629 vec<tree, va_gc> *arg_vec;
7630 vec_alloc (arg_vec, 2);
7631 for (i = 0; i < 2; i++)
7632 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7633 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7634 /* FALLTHROUGH */
7636 case BUILT_IN_STRCMP:
7637 target = expand_builtin_strcmp (exp, target);
7638 if (target)
7639 return target;
7640 break;
7642 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7643 back to a BUILT_IN_STRNCMP. */
7644 case BUILT_IN_STRNCMP_EQ:
7645 target = expand_builtin_memcmp (exp, target, true);
7646 if (target)
7647 return target;
7649 /* Change it back to a BUILT_IN_STRNCMP. */
7650 TREE_OPERAND (exp, 1)
7651 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7652 /* FALLTHROUGH */
7654 case BUILT_IN_STRNCMP:
7655 target = expand_builtin_strncmp (exp, target, mode);
7656 if (target)
7657 return target;
7658 break;
7660 case BUILT_IN_BCMP:
7661 case BUILT_IN_MEMCMP:
7662 case BUILT_IN_MEMCMP_EQ:
7663 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7664 if (target)
7665 return target;
7666 if (fcode == BUILT_IN_MEMCMP_EQ)
7668 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7669 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7671 break;
7673 case BUILT_IN_SETJMP:
7674 /* This should have been lowered to the builtins below. */
7675 gcc_unreachable ();
7677 case BUILT_IN_SETJMP_SETUP:
7678 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7679 and the receiver label. */
7680 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7682 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7683 VOIDmode, EXPAND_NORMAL);
7684 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7685 rtx_insn *label_r = label_rtx (label);
7687 /* This is copied from the handling of non-local gotos. */
7688 expand_builtin_setjmp_setup (buf_addr, label_r);
7689 nonlocal_goto_handler_labels
7690 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7691 nonlocal_goto_handler_labels);
7692 /* ??? Do not let expand_label treat us as such since we would
7693 not want to be both on the list of non-local labels and on
7694 the list of forced labels. */
7695 FORCED_LABEL (label) = 0;
7696 return const0_rtx;
7698 break;
7700 case BUILT_IN_SETJMP_RECEIVER:
7701 /* __builtin_setjmp_receiver is passed the receiver label. */
7702 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7704 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7705 rtx_insn *label_r = label_rtx (label);
7707 expand_builtin_setjmp_receiver (label_r);
7708 return const0_rtx;
7710 break;
7712 /* __builtin_longjmp is passed a pointer to an array of five words.
7713 It's similar to the C library longjmp function but works with
7714 __builtin_setjmp above. */
7715 case BUILT_IN_LONGJMP:
7716 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7718 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7719 VOIDmode, EXPAND_NORMAL);
7720 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7722 if (value != const1_rtx)
7724 error ("%<__builtin_longjmp%> second argument must be 1");
7725 return const0_rtx;
7728 expand_builtin_longjmp (buf_addr, value);
7729 return const0_rtx;
7731 break;
7733 case BUILT_IN_NONLOCAL_GOTO:
7734 target = expand_builtin_nonlocal_goto (exp);
7735 if (target)
7736 return target;
7737 break;
7739 /* This updates the setjmp buffer that is its argument with the value
7740 of the current stack pointer. */
7741 case BUILT_IN_UPDATE_SETJMP_BUF:
7742 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7744 rtx buf_addr
7745 = expand_normal (CALL_EXPR_ARG (exp, 0));
7747 expand_builtin_update_setjmp_buf (buf_addr);
7748 return const0_rtx;
7750 break;
7752 case BUILT_IN_TRAP:
7753 expand_builtin_trap ();
7754 return const0_rtx;
7756 case BUILT_IN_UNREACHABLE:
7757 expand_builtin_unreachable ();
7758 return const0_rtx;
7760 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7761 case BUILT_IN_SIGNBITD32:
7762 case BUILT_IN_SIGNBITD64:
7763 case BUILT_IN_SIGNBITD128:
7764 target = expand_builtin_signbit (exp, target);
7765 if (target)
7766 return target;
7767 break;
7769 /* Various hooks for the DWARF 2 __throw routine. */
7770 case BUILT_IN_UNWIND_INIT:
7771 expand_builtin_unwind_init ();
7772 return const0_rtx;
7773 case BUILT_IN_DWARF_CFA:
7774 return virtual_cfa_rtx;
7775 #ifdef DWARF2_UNWIND_INFO
7776 case BUILT_IN_DWARF_SP_COLUMN:
7777 return expand_builtin_dwarf_sp_column ();
7778 case BUILT_IN_INIT_DWARF_REG_SIZES:
7779 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7780 return const0_rtx;
7781 #endif
7782 case BUILT_IN_FROB_RETURN_ADDR:
7783 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7784 case BUILT_IN_EXTRACT_RETURN_ADDR:
7785 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7786 case BUILT_IN_EH_RETURN:
7787 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7788 CALL_EXPR_ARG (exp, 1));
7789 return const0_rtx;
7790 case BUILT_IN_EH_RETURN_DATA_REGNO:
7791 return expand_builtin_eh_return_data_regno (exp);
7792 case BUILT_IN_EXTEND_POINTER:
7793 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7794 case BUILT_IN_EH_POINTER:
7795 return expand_builtin_eh_pointer (exp);
7796 case BUILT_IN_EH_FILTER:
7797 return expand_builtin_eh_filter (exp);
7798 case BUILT_IN_EH_COPY_VALUES:
7799 return expand_builtin_eh_copy_values (exp);
7801 case BUILT_IN_VA_START:
7802 return expand_builtin_va_start (exp);
7803 case BUILT_IN_VA_END:
7804 return expand_builtin_va_end (exp);
7805 case BUILT_IN_VA_COPY:
7806 return expand_builtin_va_copy (exp);
7807 case BUILT_IN_EXPECT:
7808 return expand_builtin_expect (exp, target);
7809 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7810 return expand_builtin_expect_with_probability (exp, target);
7811 case BUILT_IN_ASSUME_ALIGNED:
7812 return expand_builtin_assume_aligned (exp, target);
7813 case BUILT_IN_PREFETCH:
7814 expand_builtin_prefetch (exp);
7815 return const0_rtx;
7817 case BUILT_IN_INIT_TRAMPOLINE:
7818 return expand_builtin_init_trampoline (exp, true);
7819 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7820 return expand_builtin_init_trampoline (exp, false);
7821 case BUILT_IN_ADJUST_TRAMPOLINE:
7822 return expand_builtin_adjust_trampoline (exp);
7824 case BUILT_IN_INIT_DESCRIPTOR:
7825 return expand_builtin_init_descriptor (exp);
7826 case BUILT_IN_ADJUST_DESCRIPTOR:
7827 return expand_builtin_adjust_descriptor (exp);
7829 case BUILT_IN_FORK:
7830 case BUILT_IN_EXECL:
7831 case BUILT_IN_EXECV:
7832 case BUILT_IN_EXECLP:
7833 case BUILT_IN_EXECLE:
7834 case BUILT_IN_EXECVP:
7835 case BUILT_IN_EXECVE:
7836 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7837 if (target)
7838 return target;
7839 break;
7841 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7842 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7843 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7844 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7845 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7846 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7847 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7848 if (target)
7849 return target;
7850 break;
7852 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7853 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7854 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7855 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7856 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7857 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7858 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7859 if (target)
7860 return target;
7861 break;
7863 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7864 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7865 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7866 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7867 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7868 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7869 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7870 if (target)
7871 return target;
7872 break;
7874 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7875 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7876 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7877 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7878 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7879 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7880 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7881 if (target)
7882 return target;
7883 break;
7885 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7886 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7887 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7888 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7889 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7890 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7891 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7892 if (target)
7893 return target;
7894 break;
7896 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7897 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7898 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7899 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7900 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7901 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7902 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7903 if (target)
7904 return target;
7905 break;
7907 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7908 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7909 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7910 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7911 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7912 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7913 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7914 if (target)
7915 return target;
7916 break;
7918 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7919 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7920 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7921 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7922 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7923 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7924 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7925 if (target)
7926 return target;
7927 break;
7929 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7930 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7931 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7932 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7933 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7934 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7935 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7936 if (target)
7937 return target;
7938 break;
7940 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7941 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7942 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7943 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7944 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7945 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7946 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7947 if (target)
7948 return target;
7949 break;
7951 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7952 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7953 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7954 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7955 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7956 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7957 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7958 if (target)
7959 return target;
7960 break;
7962 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7963 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7964 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7965 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7966 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7967 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7968 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7974 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7975 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7976 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7977 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7978 if (mode == VOIDmode)
7979 mode = TYPE_MODE (boolean_type_node);
7980 if (!target || !register_operand (target, mode))
7981 target = gen_reg_rtx (mode);
7983 mode = get_builtin_sync_mode
7984 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7985 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7986 if (target)
7987 return target;
7988 break;
7990 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7991 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7992 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7993 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7994 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7995 mode = get_builtin_sync_mode
7996 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7997 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7998 if (target)
7999 return target;
8000 break;
8002 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8003 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8004 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8005 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8006 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8007 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8008 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8009 if (target)
8010 return target;
8011 break;
8013 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8014 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8015 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8016 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8017 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8018 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8019 expand_builtin_sync_lock_release (mode, exp);
8020 return const0_rtx;
8022 case BUILT_IN_SYNC_SYNCHRONIZE:
8023 expand_builtin_sync_synchronize ();
8024 return const0_rtx;
8026 case BUILT_IN_ATOMIC_EXCHANGE_1:
8027 case BUILT_IN_ATOMIC_EXCHANGE_2:
8028 case BUILT_IN_ATOMIC_EXCHANGE_4:
8029 case BUILT_IN_ATOMIC_EXCHANGE_8:
8030 case BUILT_IN_ATOMIC_EXCHANGE_16:
8031 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8032 target = expand_builtin_atomic_exchange (mode, exp, target);
8033 if (target)
8034 return target;
8035 break;
8037 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8038 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8039 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8040 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8041 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8043 unsigned int nargs, z;
8044 vec<tree, va_gc> *vec;
8046 mode =
8047 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8048 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8049 if (target)
8050 return target;
8052 /* If this is turned into an external library call, the weak parameter
8053 must be dropped to match the expected parameter list. */
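	/* E.g. the six-argument builtin form
	     __atomic_compare_exchange_4 (ptr, &expected, desired,
					  weak, success, failure)
	   becomes the five-argument library call
	     __atomic_compare_exchange_4 (ptr, &expected, desired,
					  success, failure).  */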
8054 nargs = call_expr_nargs (exp);
8055 vec_alloc (vec, nargs - 1);
8056 for (z = 0; z < 3; z++)
8057 vec->quick_push (CALL_EXPR_ARG (exp, z));
8058 /* Skip the boolean weak parameter. */
8059 for (z = 4; z < 6; z++)
8060 vec->quick_push (CALL_EXPR_ARG (exp, z));
8061 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8062 break;
8065 case BUILT_IN_ATOMIC_LOAD_1:
8066 case BUILT_IN_ATOMIC_LOAD_2:
8067 case BUILT_IN_ATOMIC_LOAD_4:
8068 case BUILT_IN_ATOMIC_LOAD_8:
8069 case BUILT_IN_ATOMIC_LOAD_16:
8070 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8071 target = expand_builtin_atomic_load (mode, exp, target);
8072 if (target)
8073 return target;
8074 break;
8076 case BUILT_IN_ATOMIC_STORE_1:
8077 case BUILT_IN_ATOMIC_STORE_2:
8078 case BUILT_IN_ATOMIC_STORE_4:
8079 case BUILT_IN_ATOMIC_STORE_8:
8080 case BUILT_IN_ATOMIC_STORE_16:
8081 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8082 target = expand_builtin_atomic_store (mode, exp);
8083 if (target)
8084 return const0_rtx;
8085 break;
8087 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8088 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8089 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8090 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8091 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8093 enum built_in_function lib;
8094 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8095 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8096 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8097 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8098 ignore, lib);
8099 if (target)
8100 return target;
8101 break;
8103 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8104 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8105 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8106 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8107 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8109 enum built_in_function lib;
8110 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8111 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8112 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8113 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8114 ignore, lib);
8115 if (target)
8116 return target;
8117 break;
8119 case BUILT_IN_ATOMIC_AND_FETCH_1:
8120 case BUILT_IN_ATOMIC_AND_FETCH_2:
8121 case BUILT_IN_ATOMIC_AND_FETCH_4:
8122 case BUILT_IN_ATOMIC_AND_FETCH_8:
8123 case BUILT_IN_ATOMIC_AND_FETCH_16:
8125 enum built_in_function lib;
8126 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8127 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8128 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8129 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8130 ignore, lib);
8131 if (target)
8132 return target;
8133 break;
8135 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8136 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8137 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8138 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8139 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8141 enum built_in_function lib;
8142 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8143 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8144 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8145 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8146 ignore, lib);
8147 if (target)
8148 return target;
8149 break;
8151 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8152 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8153 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8154 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8155 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8157 enum built_in_function lib;
8158 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8159 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8160 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8161 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8162 ignore, lib);
8163 if (target)
8164 return target;
8165 break;
8167 case BUILT_IN_ATOMIC_OR_FETCH_1:
8168 case BUILT_IN_ATOMIC_OR_FETCH_2:
8169 case BUILT_IN_ATOMIC_OR_FETCH_4:
8170 case BUILT_IN_ATOMIC_OR_FETCH_8:
8171 case BUILT_IN_ATOMIC_OR_FETCH_16:
8173 enum built_in_function lib;
8174 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8175 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8176 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8177 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8178 ignore, lib);
8179 if (target)
8180 return target;
8181 break;
8183 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8184 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8185 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8186 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8187 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8188 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8189 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8190 ignore, BUILT_IN_NONE);
8191 if (target)
8192 return target;
8193 break;
8195 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8196 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8197 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8198 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8199 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8200 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8201 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8202 ignore, BUILT_IN_NONE);
8203 if (target)
8204 return target;
8205 break;
8207 case BUILT_IN_ATOMIC_FETCH_AND_1:
8208 case BUILT_IN_ATOMIC_FETCH_AND_2:
8209 case BUILT_IN_ATOMIC_FETCH_AND_4:
8210 case BUILT_IN_ATOMIC_FETCH_AND_8:
8211 case BUILT_IN_ATOMIC_FETCH_AND_16:
8212 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8213 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8214 ignore, BUILT_IN_NONE);
8215 if (target)
8216 return target;
8217 break;
8219 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8220 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8221 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8222 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8223 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8224 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8225 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8226 ignore, BUILT_IN_NONE);
8227 if (target)
8228 return target;
8229 break;
8231 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8232 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8233 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8234 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8235 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8236 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8237 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8238 ignore, BUILT_IN_NONE);
8239 if (target)
8240 return target;
8241 break;
8243 case BUILT_IN_ATOMIC_FETCH_OR_1:
8244 case BUILT_IN_ATOMIC_FETCH_OR_2:
8245 case BUILT_IN_ATOMIC_FETCH_OR_4:
8246 case BUILT_IN_ATOMIC_FETCH_OR_8:
8247 case BUILT_IN_ATOMIC_FETCH_OR_16:
8248 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8249 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8250 ignore, BUILT_IN_NONE);
8251 if (target)
8252 return target;
8253 break;
8255 case BUILT_IN_ATOMIC_TEST_AND_SET:
8256 return expand_builtin_atomic_test_and_set (exp, target);
8258 case BUILT_IN_ATOMIC_CLEAR:
8259 return expand_builtin_atomic_clear (exp);
8261 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8262 return expand_builtin_atomic_always_lock_free (exp);
8264 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8265 target = expand_builtin_atomic_is_lock_free (exp);
8266 if (target)
8267 return target;
8268 break;
8270 case BUILT_IN_ATOMIC_THREAD_FENCE:
8271 expand_builtin_atomic_thread_fence (exp);
8272 return const0_rtx;
8274 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8275 expand_builtin_atomic_signal_fence (exp);
8276 return const0_rtx;
8278 case BUILT_IN_OBJECT_SIZE:
8279 return expand_builtin_object_size (exp);
8281 case BUILT_IN_MEMCPY_CHK:
8282 case BUILT_IN_MEMPCPY_CHK:
8283 case BUILT_IN_MEMMOVE_CHK:
8284 case BUILT_IN_MEMSET_CHK:
8285 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8286 if (target)
8287 return target;
8288 break;
8290 case BUILT_IN_STRCPY_CHK:
8291 case BUILT_IN_STPCPY_CHK:
8292 case BUILT_IN_STRNCPY_CHK:
8293 case BUILT_IN_STPNCPY_CHK:
8294 case BUILT_IN_STRCAT_CHK:
8295 case BUILT_IN_STRNCAT_CHK:
8296 case BUILT_IN_SNPRINTF_CHK:
8297 case BUILT_IN_VSNPRINTF_CHK:
8298 maybe_emit_chk_warning (exp, fcode);
8299 break;
8301 case BUILT_IN_SPRINTF_CHK:
8302 case BUILT_IN_VSPRINTF_CHK:
8303 maybe_emit_sprintf_chk_warning (exp, fcode);
8304 break;
8306 case BUILT_IN_FREE:
8307 if (warn_free_nonheap_object)
8308 maybe_emit_free_warning (exp);
8309 break;
8311 case BUILT_IN_THREAD_POINTER:
8312 return expand_builtin_thread_pointer (exp, target);
8314 case BUILT_IN_SET_THREAD_POINTER:
8315 expand_builtin_set_thread_pointer (exp);
8316 return const0_rtx;
8318 case BUILT_IN_ACC_ON_DEVICE:
8319 /* Do a library call if we failed to expand the builtin when
8320 folding. */
8321 break;
8323 case BUILT_IN_GOACC_PARLEVEL_ID:
8324 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8325 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8327 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8328 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8330 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8331 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8332 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8333 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8334 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8335 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8336 return expand_speculation_safe_value (mode, exp, target, ignore);
8338 default: /* Just do a library call for an unknown builtin. */
8339 break;
8342 /* The switch statement above can drop through to cause the function
8343 to be called normally. */
8344 return expand_call (exp, target, ignore);
8347 /* Determine whether a tree node represents a call to a built-in
8348 function. If the tree T is a call to a built-in function with
8349 the right number of arguments of the appropriate types, return
8350 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8351 Otherwise the return value is END_BUILTINS. */
8353 enum built_in_function
8354 builtin_mathfn_code (const_tree t)
8356 const_tree fndecl, arg, parmlist;
8357 const_tree argtype, parmtype;
8358 const_call_expr_arg_iterator iter;
8360 if (TREE_CODE (t) != CALL_EXPR)
8361 return END_BUILTINS;
8363 fndecl = get_callee_fndecl (t);
8364 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8365 return END_BUILTINS;
8367 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8368 init_const_call_expr_arg_iterator (t, &iter);
8369 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8371 /* If a function doesn't take a variable number of arguments,
8372 the last element in the list will have type `void'. */
8373 parmtype = TREE_VALUE (parmlist);
8374 if (VOID_TYPE_P (parmtype))
8376 if (more_const_call_expr_args_p (&iter))
8377 return END_BUILTINS;
8378 return DECL_FUNCTION_CODE (fndecl);
8381 if (! more_const_call_expr_args_p (&iter))
8382 return END_BUILTINS;
8384 arg = next_const_call_expr_arg (&iter);
8385 argtype = TREE_TYPE (arg);
8387 if (SCALAR_FLOAT_TYPE_P (parmtype))
8389 if (! SCALAR_FLOAT_TYPE_P (argtype))
8390 return END_BUILTINS;
8392 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8394 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8395 return END_BUILTINS;
8397 else if (POINTER_TYPE_P (parmtype))
8399 if (! POINTER_TYPE_P (argtype))
8400 return END_BUILTINS;
8402 else if (INTEGRAL_TYPE_P (parmtype))
8404 if (! INTEGRAL_TYPE_P (argtype))
8405 return END_BUILTINS;
8407 else
8408 return END_BUILTINS;
8411 /* Variable-length argument list. */
8412 return DECL_FUNCTION_CODE (fndecl);
8415 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8416 evaluate to a constant. */
8418 static tree
8419 fold_builtin_constant_p (tree arg)
8421 /* We return 1 for a numeric type that's known to be a constant
8422 value at compile-time or for an aggregate type that's a
8423 literal constant. */
8424 STRIP_NOPS (arg);
8426 /* If we know this is a constant, return the constant 1. */
8427 if (CONSTANT_CLASS_P (arg)
8428 || (TREE_CODE (arg) == CONSTRUCTOR
8429 && TREE_CONSTANT (arg)))
8430 return integer_one_node;
8431 if (TREE_CODE (arg) == ADDR_EXPR)
8433 tree op = TREE_OPERAND (arg, 0);
8434 if (TREE_CODE (op) == STRING_CST
8435 || (TREE_CODE (op) == ARRAY_REF
8436 && integer_zerop (TREE_OPERAND (op, 1))
8437 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8438 return integer_one_node;
8441 /* If this expression has side effects, show we don't know it to be a
8442 constant. Likewise if it's a pointer or aggregate type, since in
8443 those cases we only want literals; those are only optimized
8444 when generating RTL, not later.
8445 And finally, if we are compiling an initializer, not code, we
8446 need to return a definite result now; there's not going to be any
8447 more optimization done. */
8448 if (TREE_SIDE_EFFECTS (arg)
8449 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8450 || POINTER_TYPE_P (TREE_TYPE (arg))
8451 || cfun == 0
8452 || folding_initializer
8453 || force_folding_builtin_constant_p)
8454 return integer_zero_node;
8456 return NULL_TREE;
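/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   what the folder above decides.  */
extern int g;
int a = __builtin_constant_p (42);    /* literal constant: folds to 1 */
int b = __builtin_constant_p ("abc"); /* string constant: folds to 1 */
int c = __builtin_constant_p (g);     /* static initializer must resolve
                                         now: folds to 0 */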
8459 /* Create a call to builtin_expect or builtin_expect_with_probability
8460 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8461 The Fortran FE can also produce builtin_expect with a PREDICTOR as the
8462 third argument; builtin_expect_with_probability instead uses the third
8463 argument as the PROBABILITY value. */
8465 static tree
8466 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8467 tree predictor, tree probability)
8469 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8471 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8472 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8473 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8474 ret_type = TREE_TYPE (TREE_TYPE (fn));
8475 pred_type = TREE_VALUE (arg_types);
8476 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8478 pred = fold_convert_loc (loc, pred_type, pred);
8479 expected = fold_convert_loc (loc, expected_type, expected);
8481 if (probability)
8482 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8483 else
8484 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8485 predictor);
8487 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8488 build_int_cst (ret_type, 0));
8491 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8492 NULL_TREE if no simplification is possible. */
8494 tree
8495 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8496 tree arg3)
8498 tree inner, fndecl, inner_arg0;
8499 enum tree_code code;
8501 /* Distribute the expected value over short-circuiting operators.
8502 See through the cast from truthvalue_type_node to long. */
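  /* For example, __builtin_expect (a && b, 1) is distributed into
     __builtin_expect (a, 1) && __builtin_expect (b, 1), with each half
     wrapped in a != 0 comparison to form a truthvalue.  */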
8503 inner_arg0 = arg0;
8504 while (CONVERT_EXPR_P (inner_arg0)
8505 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8506 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8507 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8509 /* If this is a builtin_expect within a builtin_expect, keep the
8510 inner one. See through a comparison against a constant; it
8511 might have been added to create a truthvalue. */
8512 inner = inner_arg0;
8514 if (COMPARISON_CLASS_P (inner)
8515 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8516 inner = TREE_OPERAND (inner, 0);
8518 if (TREE_CODE (inner) == CALL_EXPR
8519 && (fndecl = get_callee_fndecl (inner))
8520 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8521 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8522 return arg0;
8524 inner = inner_arg0;
8525 code = TREE_CODE (inner);
8526 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8528 tree op0 = TREE_OPERAND (inner, 0);
8529 tree op1 = TREE_OPERAND (inner, 1);
8530 arg1 = save_expr (arg1);
8532 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8533 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8534 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8536 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8539 /* If the argument isn't invariant then there's nothing else we can do. */
8540 if (!TREE_CONSTANT (inner_arg0))
8541 return NULL_TREE;
8543 /* If we expect that a comparison against the argument will fold to
8544 a constant, return the constant. In practice, this means a true
8545 constant or the address of a non-weak symbol. */
8546 inner = inner_arg0;
8547 STRIP_NOPS (inner);
8548 if (TREE_CODE (inner) == ADDR_EXPR)
8552 inner = TREE_OPERAND (inner, 0);
8554 while (TREE_CODE (inner) == COMPONENT_REF
8555 || TREE_CODE (inner) == ARRAY_REF);
8556 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8557 return NULL_TREE;
8560 /* Otherwise, ARG0 already has the proper type for the return value. */
8561 return arg0;
8564 /* Fold a call to __builtin_classify_type with argument ARG. */
8566 static tree
8567 fold_builtin_classify_type (tree arg)
8569 if (arg == 0)
8570 return build_int_cst (integer_type_node, no_type_class);
8572 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8575 /* Fold a call to __builtin_strlen with argument ARG. */
8577 static tree
8578 fold_builtin_strlen (location_t loc, tree type, tree arg)
8580 if (!validate_arg (arg, POINTER_TYPE))
8581 return NULL_TREE;
8582 else
8584 c_strlen_data lendata = { };
8585 tree len = c_strlen (arg, 0, &lendata);
8587 if (len)
8588 return fold_convert_loc (loc, type, len);
8590 if (!lendata.decl)
8591 c_strlen (arg, 1, &lendata);
8593 if (lendata.decl)
8595 if (EXPR_HAS_LOCATION (arg))
8596 loc = EXPR_LOCATION (arg);
8597 else if (loc == UNKNOWN_LOCATION)
8598 loc = input_location;
8599 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8602 return NULL_TREE;
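/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   the fold and the missing-NUL diagnostic above.  */
int n (void) { return __builtin_strlen ("abc"); }  /* folds to 3 */
const char three[3] = { 'a', 'b', 'c' };           /* no terminating NUL */
int m (void) { return __builtin_strlen (three); }  /* may warn: missing NUL */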
8606 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8608 static tree
8609 fold_builtin_inf (location_t loc, tree type, int warn)
8611 REAL_VALUE_TYPE real;
8613 /* __builtin_inff is intended to be usable to define INFINITY on all
8614 targets. If an infinity is not available, INFINITY expands "to a
8615 positive constant of type float that overflows at translation
8616 time", footnote "In this case, using INFINITY will violate the
8617 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8618 Thus we pedwarn to ensure this constraint violation is
8619 diagnosed. */
8620 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8621 pedwarn (loc, 0, "target format does not support infinity");
8623 real_inf (&real);
8624 return build_real (type, real);
8627 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8628 NULL_TREE if no simplification can be made. */
8630 static tree
8631 fold_builtin_sincos (location_t loc,
8632 tree arg0, tree arg1, tree arg2)
8634 tree type;
8635 tree fndecl, call = NULL_TREE;
8637 if (!validate_arg (arg0, REAL_TYPE)
8638 || !validate_arg (arg1, POINTER_TYPE)
8639 || !validate_arg (arg2, POINTER_TYPE))
8640 return NULL_TREE;
8642 type = TREE_TYPE (arg0);
8644 /* Calculate the result when the argument is a constant. */
8645 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8646 if (fn == END_BUILTINS)
8647 return NULL_TREE;
8649 /* Canonicalize sincos to cexpi. */
8650 if (TREE_CODE (arg0) == REAL_CST)
8652 tree complex_type = build_complex_type (type);
8653 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8655 if (!call)
8657 if (!targetm.libc_has_function (function_c99_math_complex)
8658 || !builtin_decl_implicit_p (fn))
8659 return NULL_TREE;
8660 fndecl = builtin_decl_explicit (fn);
8661 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8662 call = builtin_save_expr (call);
8665 tree ptype = build_pointer_type (type);
8666 arg1 = fold_convert (ptype, arg1);
8667 arg2 = fold_convert (ptype, arg2);
8668 return build2 (COMPOUND_EXPR, void_type_node,
8669 build2 (MODIFY_EXPR, void_type_node,
8670 build_fold_indirect_ref_loc (loc, arg1),
8671 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8672 build2 (MODIFY_EXPR, void_type_node,
8673 build_fold_indirect_ref_loc (loc, arg2),
8674 fold_build1_loc (loc, REALPART_EXPR, type, call)));
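/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   the canonicalization above, assuming the target C library provides
   the C99 complex functions.  */
void
to_sin_cos (double x, double *s, double *c)
{
  /* Rewritten to roughly:
       _Complex double t = __builtin_cexpi (x);
       *s = __imag__ t;  *c = __real__ t;  */
  __builtin_sincos (x, s, c);
}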
8677 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8678 Return NULL_TREE if no simplification can be made. */
8680 static tree
8681 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8683 if (!validate_arg (arg1, POINTER_TYPE)
8684 || !validate_arg (arg2, POINTER_TYPE)
8685 || !validate_arg (len, INTEGER_TYPE))
8686 return NULL_TREE;
8688 /* If the LEN parameter is zero, return zero. */
8689 if (integer_zerop (len))
8690 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8691 arg1, arg2);
8693 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8694 if (operand_equal_p (arg1, arg2, 0))
8695 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8697 /* If the len parameter is one, return an expression corresponding to
8698 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8699 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8701 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8702 tree cst_uchar_ptr_node
8703 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8705 tree ind1
8706 = fold_convert_loc (loc, integer_type_node,
8707 build1 (INDIRECT_REF, cst_uchar_node,
8708 fold_convert_loc (loc,
8709 cst_uchar_ptr_node,
8710 arg1)));
8711 tree ind2
8712 = fold_convert_loc (loc, integer_type_node,
8713 build1 (INDIRECT_REF, cst_uchar_node,
8714 fold_convert_loc (loc,
8715 cst_uchar_ptr_node,
8716 arg2)));
8717 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8720 return NULL_TREE;
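/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   the three folds above.  */
int z1 (const void *p, const void *q)
{ return __builtin_memcmp (p, q, 0); }   /* folds to 0 */
int z2 (const void *p)
{ return __builtin_memcmp (p, p, 8); }   /* equal operands: folds to 0 */
int z3 (const void *p, const void *q)
{ return __builtin_memcmp (p, q, 1); }   /* folds to the byte difference
                                            *(const unsigned char *) p
                                            - *(const unsigned char *) q */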
8723 /* Fold a call to builtin isascii with argument ARG. */
8725 static tree
8726 fold_builtin_isascii (location_t loc, tree arg)
8728 if (!validate_arg (arg, INTEGER_TYPE))
8729 return NULL_TREE;
8730 else
8732 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8733 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8734 build_int_cst (integer_type_node,
8735 ~ (unsigned HOST_WIDE_INT) 0x7f));
8736 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8737 arg, integer_zero_node);
8741 /* Fold a call to builtin toascii with argument ARG. */
8743 static tree
8744 fold_builtin_toascii (location_t loc, tree arg)
8746 if (!validate_arg (arg, INTEGER_TYPE))
8747 return NULL_TREE;
8749 /* Transform toascii(c) -> (c & 0x7f). */
8750 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8751 build_int_cst (integer_type_node, 0x7f));
8754 /* Fold a call to builtin isdigit with argument ARG. */
8756 static tree
8757 fold_builtin_isdigit (location_t loc, tree arg)
8759 if (!validate_arg (arg, INTEGER_TYPE))
8760 return NULL_TREE;
8761 else
8763 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8764 /* According to the C standard, isdigit is unaffected by locale.
8765 However, it definitely is affected by the target character set. */
8766 unsigned HOST_WIDE_INT target_digit0
8767 = lang_hooks.to_target_charset ('0');
8769 if (target_digit0 == 0)
8770 return NULL_TREE;
8772 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8773 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8774 build_int_cst (unsigned_type_node, target_digit0));
8775 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8776 build_int_cst (unsigned_type_node, 9));
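/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   the locale-independent rewrites above.  */
int t1 (int c) { return __builtin_isascii (c); } /* -> (c & ~0x7f) == 0 */
int t2 (int c) { return __builtin_toascii (c); } /* -> c & 0x7f */
int t3 (int c) { return __builtin_isdigit (c); } /* -> (unsigned) c - '0' <= 9 */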
8780 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8782 static tree
8783 fold_builtin_fabs (location_t loc, tree arg, tree type)
8785 if (!validate_arg (arg, REAL_TYPE))
8786 return NULL_TREE;
8788 arg = fold_convert_loc (loc, type, arg);
8789 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8792 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8794 static tree
8795 fold_builtin_abs (location_t loc, tree arg, tree type)
8797 if (!validate_arg (arg, INTEGER_TYPE))
8798 return NULL_TREE;
8800 arg = fold_convert_loc (loc, type, arg);
8801 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8804 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8806 static tree
8807 fold_builtin_carg (location_t loc, tree arg, tree type)
8809 if (validate_arg (arg, COMPLEX_TYPE)
8810 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8812 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8814 if (atan2_fn)
8816 tree new_arg = builtin_save_expr (arg);
8817 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8818 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8819 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8823 return NULL_TREE;
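/* Illustrative sketch (not part of builtins.c, hypothetical user code):
   the rewrite above.  */
double
phase (_Complex double z)
{
  /* Folded to atan2 (__imag__ z, __real__ z) when atan2 is available.  */
  return __builtin_carg (z);
}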
8826 /* Fold a call to builtin frexp; we can assume the base is 2. */
8828 static tree
8829 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8831 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8832 return NULL_TREE;
8834 STRIP_NOPS (arg0);
8836 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8837 return NULL_TREE;
8839 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8841 /* Proceed if a valid pointer type was passed in. */
8842 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8844 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8845 tree frac, exp;
8847 switch (value->cl)
8849 case rvc_zero:
8850 /* For +-0, return (*exp = 0, +-0). */
8851 exp = integer_zero_node;
8852 frac = arg0;
8853 break;
8854 case rvc_nan:
8855 case rvc_inf:
8856 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8857 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8858 case rvc_normal:
8860 /* Since the frexp function always expects base 2, and in
8861 GCC normalized significands are already in the range
8862 [0.5, 1.0), we have exactly what frexp wants. */
8863 REAL_VALUE_TYPE frac_rvt = *value;
8864 SET_REAL_EXP (&frac_rvt, 0);
8865 frac = build_real (rettype, frac_rvt);
8866 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8868 break;
8869 default:
8870 gcc_unreachable ();
8873 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8874 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8875 TREE_SIDE_EFFECTS (arg1) = 1;
8876 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8879 return NULL_TREE;
8882 /* Fold a call to builtin modf. */
8884 static tree
8885 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8887 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8888 return NULL_TREE;
8890 STRIP_NOPS (arg0);
8892 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8893 return NULL_TREE;
8895 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8897 /* Proceed if a valid pointer type was passed in. */
8898 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8900 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8901 REAL_VALUE_TYPE trunc, frac;
8903 switch (value->cl)
8905 case rvc_nan:
8906 case rvc_zero:
8907 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8908 trunc = frac = *value;
8909 break;
8910 case rvc_inf:
8911 /* For +-Inf, return (*arg1 = arg0, +-0). */
8912 frac = dconst0;
8913 frac.sign = value->sign;
8914 trunc = *value;
8915 break;
8916 case rvc_normal:
8917 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8918 real_trunc (&trunc, VOIDmode, value);
8919 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8920 /* If the original number was negative and already
8921 integral, then the fractional part is -0.0. */
8922 if (value->sign && frac.cl == rvc_zero)
8923 frac.sign = value->sign;
8924 break;
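/* Illustrative examples: modf (-2.5) gives trunc == -2.0 and
   frac == -0.5; modf (-2.0) gives trunc == -2.0 and frac == -0.0,
   which is the case the sign fixup above handles.  */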
8927 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8928 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8929 build_real (rettype, trunc));
8930 TREE_SIDE_EFFECTS (arg1) = 1;
8931 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8932 build_real (rettype, frac));
8935 return NULL_TREE;
8938 /* Given a location LOC, an interclass builtin function decl FNDECL
8939 and its single argument ARG, return a folded expression computing
8940 the same, or NULL_TREE if we either couldn't or didn't want to fold
8941 (the latter happens if there's an RTL instruction available). */
8943 static tree
8944 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8946 machine_mode mode;
8948 if (!validate_arg (arg, REAL_TYPE))
8949 return NULL_TREE;
8951 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8952 return NULL_TREE;
8954 mode = TYPE_MODE (TREE_TYPE (arg));
8956 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8958 /* If there is no optab, try generic code. */
8959 switch (DECL_FUNCTION_CODE (fndecl))
8961 tree result;
8963 CASE_FLT_FN (BUILT_IN_ISINF):
8965 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
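/* Illustrative sketch: for IEEE double the string built below denotes
   DBL_MAX, so the result is isgreater (fabs (x), DBL_MAX).  Only +-Inf
   compares greater than the largest finite value; a NaN argument
   compares unordered, so isgreater yields 0 for it as required.  */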
8966 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8967 tree type = TREE_TYPE (arg);
8968 REAL_VALUE_TYPE r;
8969 char buf[128];
8971 if (is_ibm_extended)
8973 /* NaN and Inf are encoded in the high-order double value
8974 only. The low-order value is not significant. */
8975 type = double_type_node;
8976 mode = DFmode;
8977 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8979 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8980 real_from_string (&r, buf);
8981 result = build_call_expr (isgr_fn, 2,
8982 fold_build1_loc (loc, ABS_EXPR, type, arg),
8983 build_real (type, r));
8984 return result;
8986 CASE_FLT_FN (BUILT_IN_FINITE):
8987 case BUILT_IN_ISFINITE:
8989 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8990 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8991 tree type = TREE_TYPE (arg);
8992 REAL_VALUE_TYPE r;
8993 char buf[128];
8995 if (is_ibm_extended)
8997 /* NaN and Inf are encoded in the high-order double value
8998 only. The low-order value is not significant. */
8999 type = double_type_node;
9000 mode = DFmode;
9001 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9003 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9004 real_from_string (&r, buf);
9005 result = build_call_expr (isle_fn, 2,
9006 fold_build1_loc (loc, ABS_EXPR, type, arg),
9007 build_real (type, r));
9008 /*result = fold_build2_loc (loc, UNGT_EXPR,
9009 TREE_TYPE (TREE_TYPE (fndecl)),
9010 fold_build1_loc (loc, ABS_EXPR, type, arg),
9011 build_real (type, r));
9012 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9013 TREE_TYPE (TREE_TYPE (fndecl)),
9014 result);*/
9015 return result;
9017 case BUILT_IN_ISNORMAL:
9019 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9020 islessequal(fabs(x),DBL_MAX). */
9021 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9022 tree type = TREE_TYPE (arg);
9023 tree orig_arg, max_exp, min_exp;
9024 machine_mode orig_mode = mode;
9025 REAL_VALUE_TYPE rmax, rmin;
9026 char buf[128];
9028 orig_arg = arg = builtin_save_expr (arg);
9029 if (is_ibm_extended)
9031 /* Use double to test the normal range of IBM extended
9032 precision. Emin for IBM extended precision is
9033 different to emin for IEEE double, being 53 higher
9034 since the low double exponent is at least 53 lower
9035 than the high double exponent. */
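/* Illustrative arithmetic: IEEE double has a smallest normal of
   0x1p-1022, so the IBM extended normal limit used below works out
   to 0x1p-969 (0x1p-1022 scaled up by 2^53).  */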
9036 type = double_type_node;
9037 mode = DFmode;
9038 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9040 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9042 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9043 real_from_string (&rmax, buf);
9044 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9045 real_from_string (&rmin, buf);
9046 max_exp = build_real (type, rmax);
9047 min_exp = build_real (type, rmin);
9049 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9050 if (is_ibm_extended)
9052 /* Testing the high end of the range is done just using
9053 the high double, using the same test as isfinite().
9054 For the subnormal end of the range we first test the
9055 high double, then if its magnitude is equal to the
9056 limit of 0x1p-969, we test whether the low double is
9057 non-zero and opposite sign to the high double. */
9058 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9059 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9060 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9061 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9062 arg, min_exp);
9063 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9064 complex_double_type_node, orig_arg);
9065 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9066 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9067 tree zero = build_real (type, dconst0);
9068 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9069 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9070 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9071 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9072 fold_build3 (COND_EXPR,
9073 integer_type_node,
9074 hilt, logt, lolt));
9075 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9076 eq_min, ok_lo);
9077 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9078 gt_min, eq_min);
9080 else
9082 tree const isge_fn
9083 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9084 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9086 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9087 max_exp, min_exp);
9088 return result;
9090 default:
9091 break;
9094 return NULL_TREE;
9097 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9098 ARG is the argument for the call. */
9100 static tree
9101 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9103 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9105 if (!validate_arg (arg, REAL_TYPE))
9106 return NULL_TREE;
9108 switch (builtin_index)
9110 case BUILT_IN_ISINF:
9111 if (!HONOR_INFINITIES (arg))
9112 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9114 return NULL_TREE;
9116 case BUILT_IN_ISINF_SIGN:
9118 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9119 /* In a boolean context, GCC will fold the inner COND_EXPR to
9120 1. So e.g. "if (isinf_sign(x))" would be folded to just
9121 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9122 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9123 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9124 tree tmp = NULL_TREE;
9126 arg = builtin_save_expr (arg);
9128 if (signbit_fn && isinf_fn)
9130 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9131 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9133 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9134 signbit_call, integer_zero_node);
9135 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9136 isinf_call, integer_zero_node);
9138 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9139 integer_minus_one_node, integer_one_node);
9140 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9141 isinf_call, tmp,
9142 integer_zero_node);
9145 return tmp;
9148 case BUILT_IN_ISFINITE:
9149 if (!HONOR_NANS (arg)
9150 && !HONOR_INFINITIES (arg))
9151 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9153 return NULL_TREE;
9155 case BUILT_IN_ISNAN:
9156 if (!HONOR_NANS (arg))
9157 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9160 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9161 if (is_ibm_extended)
9163 /* NaN and Inf are encoded in the high-order double value
9164 only. The low-order value is not significant. */
9165 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9168 arg = builtin_save_expr (arg);
9169 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9171 default:
9172 gcc_unreachable ();
9176 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9177 This builtin will generate code to return the appropriate floating
9178 point classification depending on the value of the floating point
9179 number passed in. The possible return values must be supplied as
9180 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9181 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9182 one floating-point argument, which is "type generic". */
9184 static tree
9185 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9187 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9188 arg, type, res, tmp;
9189 machine_mode mode;
9190 REAL_VALUE_TYPE r;
9191 char buf[128];
9193 /* Verify the required arguments in the original call. */
9194 if (nargs != 6
9195 || !validate_arg (args[0], INTEGER_TYPE)
9196 || !validate_arg (args[1], INTEGER_TYPE)
9197 || !validate_arg (args[2], INTEGER_TYPE)
9198 || !validate_arg (args[3], INTEGER_TYPE)
9199 || !validate_arg (args[4], INTEGER_TYPE)
9200 || !validate_arg (args[5], REAL_TYPE))
9201 return NULL_TREE;
9203 fp_nan = args[0];
9204 fp_infinite = args[1];
9205 fp_normal = args[2];
9206 fp_subnormal = args[3];
9207 fp_zero = args[4];
9208 arg = args[5];
9209 type = TREE_TYPE (arg);
9210 mode = TYPE_MODE (type);
9211 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9213 /* fpclassify(x) ->
9214 isnan(x) ? FP_NAN :
9215 (fabs(x) == Inf ? FP_INFINITE :
9216 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9217 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
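/* Illustrative trace: for x == 0x1p-1074, the smallest IEEE double
   subnormal, the chain evaluates as ordered (not NaN), fabs (x) below
   0x1p-1022 (not normal), not Inf, not zero, hence FP_SUBNORMAL.  */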
9219 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9220 build_real (type, dconst0));
9221 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9222 tmp, fp_zero, fp_subnormal);
9224 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9225 real_from_string (&r, buf);
9226 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9227 arg, build_real (type, r));
9228 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9230 if (HONOR_INFINITIES (mode))
9232 real_inf (&r);
9233 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9234 build_real (type, r));
9235 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9236 fp_infinite, res);
9239 if (HONOR_NANS (mode))
9241 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9242 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9245 return res;
9248 /* Fold a call to an unordered comparison function such as
9249 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9250 being called and ARG0 and ARG1 are the arguments for the call.
9251 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9252 the opposite of the desired result. UNORDERED_CODE is used
9253 for modes that can hold NaNs and ORDERED_CODE is used for
9254 the rest. */
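/* Illustrative example: isgreater (x, y) is requested with
   UNLE_EXPR/LE_EXPR, so it folds to !(x unle y) when NaNs are honored.
   The unordered form makes a NaN operand yield 0 quietly, whereas a
   plain GT_EXPR could raise an "invalid" exception.  */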
9256 static tree
9257 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9258 enum tree_code unordered_code,
9259 enum tree_code ordered_code)
9261 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9262 enum tree_code code;
9263 tree type0, type1;
9264 enum tree_code code0, code1;
9265 tree cmp_type = NULL_TREE;
9267 type0 = TREE_TYPE (arg0);
9268 type1 = TREE_TYPE (arg1);
9270 code0 = TREE_CODE (type0);
9271 code1 = TREE_CODE (type1);
9273 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9274 /* Choose the wider of two real types. */
9275 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9276 ? type0 : type1;
9277 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9278 cmp_type = type0;
9279 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9280 cmp_type = type1;
9282 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9283 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9285 if (unordered_code == UNORDERED_EXPR)
9287 if (!HONOR_NANS (arg0))
9288 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9289 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9292 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9293 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9294 fold_build2_loc (loc, code, type, arg0, arg1));
9297 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9298 arithmetic if it can never overflow, or into internal functions that
9299 return both the arithmetic result and an overflowed boolean flag in
9300 a complex integer result, or some other check for overflow.
9301 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9302 checking part of that. */
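/* Illustrative example: __builtin_add_overflow (a, b, &r) with
   non-constant int operands becomes roughly
     c = .ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <c>, (_Bool) IMAGPART_EXPR <c>
   while __builtin_add_overflow_p (INT_MAX, 1, (int) 0) folds to true
   at compile time via arith_overflowed_p.  */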
9304 static tree
9305 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9306 tree arg0, tree arg1, tree arg2)
9308 enum internal_fn ifn = IFN_LAST;
9309 /* The code of the expression corresponding to the built-in. */
9310 enum tree_code opcode = ERROR_MARK;
9311 bool ovf_only = false;
9313 switch (fcode)
9315 case BUILT_IN_ADD_OVERFLOW_P:
9316 ovf_only = true;
9317 /* FALLTHRU */
9318 case BUILT_IN_ADD_OVERFLOW:
9319 case BUILT_IN_SADD_OVERFLOW:
9320 case BUILT_IN_SADDL_OVERFLOW:
9321 case BUILT_IN_SADDLL_OVERFLOW:
9322 case BUILT_IN_UADD_OVERFLOW:
9323 case BUILT_IN_UADDL_OVERFLOW:
9324 case BUILT_IN_UADDLL_OVERFLOW:
9325 opcode = PLUS_EXPR;
9326 ifn = IFN_ADD_OVERFLOW;
9327 break;
9328 case BUILT_IN_SUB_OVERFLOW_P:
9329 ovf_only = true;
9330 /* FALLTHRU */
9331 case BUILT_IN_SUB_OVERFLOW:
9332 case BUILT_IN_SSUB_OVERFLOW:
9333 case BUILT_IN_SSUBL_OVERFLOW:
9334 case BUILT_IN_SSUBLL_OVERFLOW:
9335 case BUILT_IN_USUB_OVERFLOW:
9336 case BUILT_IN_USUBL_OVERFLOW:
9337 case BUILT_IN_USUBLL_OVERFLOW:
9338 opcode = MINUS_EXPR;
9339 ifn = IFN_SUB_OVERFLOW;
9340 break;
9341 case BUILT_IN_MUL_OVERFLOW_P:
9342 ovf_only = true;
9343 /* FALLTHRU */
9344 case BUILT_IN_MUL_OVERFLOW:
9345 case BUILT_IN_SMUL_OVERFLOW:
9346 case BUILT_IN_SMULL_OVERFLOW:
9347 case BUILT_IN_SMULLL_OVERFLOW:
9348 case BUILT_IN_UMUL_OVERFLOW:
9349 case BUILT_IN_UMULL_OVERFLOW:
9350 case BUILT_IN_UMULLL_OVERFLOW:
9351 opcode = MULT_EXPR;
9352 ifn = IFN_MUL_OVERFLOW;
9353 break;
9354 default:
9355 gcc_unreachable ();
9358 /* For the "generic" overloads, the first two arguments can have different
9359 types and the last argument determines the target type to use to check
9360 for overflow. The arguments of the other overloads all have the same
9361 type. */
9362 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9364 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9365 arguments are constant, attempt to fold the built-in call into a constant
9366 expression indicating whether or not it detected an overflow. */
9367 if (ovf_only
9368 && TREE_CODE (arg0) == INTEGER_CST
9369 && TREE_CODE (arg1) == INTEGER_CST)
9370 /* Perform the computation in the target type and check for overflow. */
9371 return omit_one_operand_loc (loc, boolean_type_node,
9372 arith_overflowed_p (opcode, type, arg0, arg1)
9373 ? boolean_true_node : boolean_false_node,
9374 arg2);
9376 tree intres, ovfres;
9377 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9379 intres = fold_binary_loc (loc, opcode, type,
9380 fold_convert_loc (loc, type, arg0),
9381 fold_convert_loc (loc, type, arg1));
9382 if (TREE_OVERFLOW (intres))
9383 intres = drop_tree_overflow (intres);
9384 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9385 ? boolean_true_node : boolean_false_node);
9387 else
9389 tree ctype = build_complex_type (type);
9390 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9391 arg0, arg1);
9392 tree tgt = save_expr (call);
9393 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9394 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9395 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9398 if (ovf_only)
9399 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9401 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9402 tree store
9403 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9404 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9407 /* Fold a call to __builtin_FILE to a constant string. */
9409 static inline tree
9410 fold_builtin_FILE (location_t loc)
9412 if (const char *fname = LOCATION_FILE (loc))
9414 /* The documentation says this builtin is equivalent to the preprocessor
9415 __FILE__ macro so it appears appropriate to use the same file prefix
9416 mappings. */
9417 fname = remap_macro_filename (fname);
9418 return build_string_literal (strlen (fname) + 1, fname);
9421 return build_string_literal (1, "");
9424 /* Fold a call to __builtin_FUNCTION to a constant string. */
9426 static inline tree
9427 fold_builtin_FUNCTION ()
9429 const char *name = "";
9431 if (current_function_decl)
9432 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9434 return build_string_literal (strlen (name) + 1, name);
9437 /* Fold a call to __builtin_LINE to an integer constant. */
9439 static inline tree
9440 fold_builtin_LINE (location_t loc, tree type)
9442 return build_int_cst (type, LOCATION_LINE (loc));
9445 /* Fold a call to built-in function FNDECL with 0 arguments.
9446 This function returns NULL_TREE if no simplification was possible. */
9448 static tree
9449 fold_builtin_0 (location_t loc, tree fndecl)
9451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9453 switch (fcode)
9455 case BUILT_IN_FILE:
9456 return fold_builtin_FILE (loc);
9458 case BUILT_IN_FUNCTION:
9459 return fold_builtin_FUNCTION ();
9461 case BUILT_IN_LINE:
9462 return fold_builtin_LINE (loc, type);
9464 CASE_FLT_FN (BUILT_IN_INF):
9465 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9466 case BUILT_IN_INFD32:
9467 case BUILT_IN_INFD64:
9468 case BUILT_IN_INFD128:
9469 return fold_builtin_inf (loc, type, true);
9471 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9472 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9473 return fold_builtin_inf (loc, type, false);
9475 case BUILT_IN_CLASSIFY_TYPE:
9476 return fold_builtin_classify_type (NULL_TREE);
9478 default:
9479 break;
9481 return NULL_TREE;
9484 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9485 This function returns NULL_TREE if no simplification was possible. */
9487 static tree
9488 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9490 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9491 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9493 if (TREE_CODE (arg0) == ERROR_MARK)
9494 return NULL_TREE;
9496 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9497 return ret;
9499 switch (fcode)
9501 case BUILT_IN_CONSTANT_P:
9503 tree val = fold_builtin_constant_p (arg0);
9505 /* Gimplification will pull the CALL_EXPR for the builtin out of
9506 an if condition. When not optimizing, we'll not CSE it back.
9507 To avoid regressions such as link errors, return false now. */
9508 if (!val && !optimize)
9509 val = integer_zero_node;
9511 return val;
9514 case BUILT_IN_CLASSIFY_TYPE:
9515 return fold_builtin_classify_type (arg0);
9517 case BUILT_IN_STRLEN:
9518 return fold_builtin_strlen (loc, type, arg0);
9520 CASE_FLT_FN (BUILT_IN_FABS):
9521 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9522 case BUILT_IN_FABSD32:
9523 case BUILT_IN_FABSD64:
9524 case BUILT_IN_FABSD128:
9525 return fold_builtin_fabs (loc, arg0, type);
9527 case BUILT_IN_ABS:
9528 case BUILT_IN_LABS:
9529 case BUILT_IN_LLABS:
9530 case BUILT_IN_IMAXABS:
9531 return fold_builtin_abs (loc, arg0, type);
9533 CASE_FLT_FN (BUILT_IN_CONJ):
9534 if (validate_arg (arg0, COMPLEX_TYPE)
9535 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9536 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9537 break;
9539 CASE_FLT_FN (BUILT_IN_CREAL):
9540 if (validate_arg (arg0, COMPLEX_TYPE)
9541 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9542 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9543 break;
9545 CASE_FLT_FN (BUILT_IN_CIMAG):
9546 if (validate_arg (arg0, COMPLEX_TYPE)
9547 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9548 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9549 break;
9551 CASE_FLT_FN (BUILT_IN_CARG):
9552 return fold_builtin_carg (loc, arg0, type);
9554 case BUILT_IN_ISASCII:
9555 return fold_builtin_isascii (loc, arg0);
9557 case BUILT_IN_TOASCII:
9558 return fold_builtin_toascii (loc, arg0);
9560 case BUILT_IN_ISDIGIT:
9561 return fold_builtin_isdigit (loc, arg0);
9563 CASE_FLT_FN (BUILT_IN_FINITE):
9564 case BUILT_IN_FINITED32:
9565 case BUILT_IN_FINITED64:
9566 case BUILT_IN_FINITED128:
9567 case BUILT_IN_ISFINITE:
9569 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9570 if (ret)
9571 return ret;
9572 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9575 CASE_FLT_FN (BUILT_IN_ISINF):
9576 case BUILT_IN_ISINFD32:
9577 case BUILT_IN_ISINFD64:
9578 case BUILT_IN_ISINFD128:
9580 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9581 if (ret)
9582 return ret;
9583 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9586 case BUILT_IN_ISNORMAL:
9587 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9589 case BUILT_IN_ISINF_SIGN:
9590 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9592 CASE_FLT_FN (BUILT_IN_ISNAN):
9593 case BUILT_IN_ISNAND32:
9594 case BUILT_IN_ISNAND64:
9595 case BUILT_IN_ISNAND128:
9596 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9598 case BUILT_IN_FREE:
9599 if (integer_zerop (arg0))
9600 return build_empty_stmt (loc);
9601 break;
9603 default:
9604 break;
9607 return NULL_TREE;
9611 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9612 This function returns NULL_TREE if no simplification was possible. */
9614 static tree
9615 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9620 if (TREE_CODE (arg0) == ERROR_MARK
9621 || TREE_CODE (arg1) == ERROR_MARK)
9622 return NULL_TREE;
9624 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9625 return ret;
9627 switch (fcode)
9629 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9630 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9631 if (validate_arg (arg0, REAL_TYPE)
9632 && validate_arg (arg1, POINTER_TYPE))
9633 return do_mpfr_lgamma_r (arg0, arg1, type);
9634 break;
9636 CASE_FLT_FN (BUILT_IN_FREXP):
9637 return fold_builtin_frexp (loc, arg0, arg1, type);
9639 CASE_FLT_FN (BUILT_IN_MODF):
9640 return fold_builtin_modf (loc, arg0, arg1, type);
9642 case BUILT_IN_STRSPN:
9643 return fold_builtin_strspn (loc, arg0, arg1);
9645 case BUILT_IN_STRCSPN:
9646 return fold_builtin_strcspn (loc, arg0, arg1);
9648 case BUILT_IN_STRPBRK:
9649 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9651 case BUILT_IN_EXPECT:
9652 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9654 case BUILT_IN_ISGREATER:
9655 return fold_builtin_unordered_cmp (loc, fndecl,
9656 arg0, arg1, UNLE_EXPR, LE_EXPR);
9657 case BUILT_IN_ISGREATEREQUAL:
9658 return fold_builtin_unordered_cmp (loc, fndecl,
9659 arg0, arg1, UNLT_EXPR, LT_EXPR);
9660 case BUILT_IN_ISLESS:
9661 return fold_builtin_unordered_cmp (loc, fndecl,
9662 arg0, arg1, UNGE_EXPR, GE_EXPR);
9663 case BUILT_IN_ISLESSEQUAL:
9664 return fold_builtin_unordered_cmp (loc, fndecl,
9665 arg0, arg1, UNGT_EXPR, GT_EXPR);
9666 case BUILT_IN_ISLESSGREATER:
9667 return fold_builtin_unordered_cmp (loc, fndecl,
9668 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9669 case BUILT_IN_ISUNORDERED:
9670 return fold_builtin_unordered_cmp (loc, fndecl,
9671 arg0, arg1, UNORDERED_EXPR,
9672 NOP_EXPR);
9674 /* We do the folding for va_start in the expander. */
9675 case BUILT_IN_VA_START:
9676 break;
9678 case BUILT_IN_OBJECT_SIZE:
9679 return fold_builtin_object_size (arg0, arg1);
9681 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9682 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9684 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9685 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9687 default:
9688 break;
9690 return NULL_TREE;
9693 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9694 and ARG2.
9695 This function returns NULL_TREE if no simplification was possible. */
9697 static tree
9698 fold_builtin_3 (location_t loc, tree fndecl,
9699 tree arg0, tree arg1, tree arg2)
9701 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9702 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9704 if (TREE_CODE (arg0) == ERROR_MARK
9705 || TREE_CODE (arg1) == ERROR_MARK
9706 || TREE_CODE (arg2) == ERROR_MARK)
9707 return NULL_TREE;
9709 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9710 arg0, arg1, arg2))
9711 return ret;
9713 switch (fcode)
9716 CASE_FLT_FN (BUILT_IN_SINCOS):
9717 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9719 CASE_FLT_FN (BUILT_IN_REMQUO):
9720 if (validate_arg (arg0, REAL_TYPE)
9721 && validate_arg (arg1, REAL_TYPE)
9722 && validate_arg (arg2, POINTER_TYPE))
9723 return do_mpfr_remquo (arg0, arg1, arg2);
9724 break;
9726 case BUILT_IN_MEMCMP:
9727 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9729 case BUILT_IN_EXPECT:
9730 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9732 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9733 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9735 case BUILT_IN_ADD_OVERFLOW:
9736 case BUILT_IN_SUB_OVERFLOW:
9737 case BUILT_IN_MUL_OVERFLOW:
9738 case BUILT_IN_ADD_OVERFLOW_P:
9739 case BUILT_IN_SUB_OVERFLOW_P:
9740 case BUILT_IN_MUL_OVERFLOW_P:
9741 case BUILT_IN_SADD_OVERFLOW:
9742 case BUILT_IN_SADDL_OVERFLOW:
9743 case BUILT_IN_SADDLL_OVERFLOW:
9744 case BUILT_IN_SSUB_OVERFLOW:
9745 case BUILT_IN_SSUBL_OVERFLOW:
9746 case BUILT_IN_SSUBLL_OVERFLOW:
9747 case BUILT_IN_SMUL_OVERFLOW:
9748 case BUILT_IN_SMULL_OVERFLOW:
9749 case BUILT_IN_SMULLL_OVERFLOW:
9750 case BUILT_IN_UADD_OVERFLOW:
9751 case BUILT_IN_UADDL_OVERFLOW:
9752 case BUILT_IN_UADDLL_OVERFLOW:
9753 case BUILT_IN_USUB_OVERFLOW:
9754 case BUILT_IN_USUBL_OVERFLOW:
9755 case BUILT_IN_USUBLL_OVERFLOW:
9756 case BUILT_IN_UMUL_OVERFLOW:
9757 case BUILT_IN_UMULL_OVERFLOW:
9758 case BUILT_IN_UMULLL_OVERFLOW:
9759 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9761 default:
9762 break;
9764 return NULL_TREE;
9767 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9768 arguments. The final bool parameter (whether the result of the call
9769 is ignored) is currently unused. This function returns NULL_TREE if no
9770 simplification was possible. */
9772 tree
9773 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9775 tree ret = NULL_TREE;
9777 switch (nargs)
9779 case 0:
9780 ret = fold_builtin_0 (loc, fndecl);
9781 break;
9782 case 1:
9783 ret = fold_builtin_1 (loc, fndecl, args[0]);
9784 break;
9785 case 2:
9786 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9787 break;
9788 case 3:
9789 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9790 break;
9791 default:
9792 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9793 break;
9795 if (ret)
9797 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9798 SET_EXPR_LOCATION (ret, loc);
9799 return ret;
9801 return NULL_TREE;
9804 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9805 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9806 of arguments in ARGS to be omitted. OLDNARGS is the number of
9807 elements in ARGS. */
9809 static tree
9810 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9811 int skip, tree fndecl, int n, va_list newargs)
9813 int nargs = oldnargs - skip + n;
9814 tree *buffer;
9816 if (n > 0)
9818 int i, j;
9820 buffer = XALLOCAVEC (tree, nargs);
9821 for (i = 0; i < n; i++)
9822 buffer[i] = va_arg (newargs, tree);
9823 for (j = skip; j < oldnargs; j++, i++)
9824 buffer[i] = args[j];
9826 else
9827 buffer = args + skip;
9829 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9832 /* Return true if FNDECL shouldn't be folded right now.
9833 If a built-in function has an inline attribute always_inline
9834 wrapper, defer folding it until after always_inline functions have
9835 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9836 might not be performed. */
9838 bool
9839 avoid_folding_inline_builtin (tree fndecl)
9841 return (DECL_DECLARED_INLINE_P (fndecl)
9842 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9843 && cfun
9844 && !cfun->always_inline_functions_inlined
9845 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9848 /* A wrapper function for builtin folding that prevents warnings for
9849 "statement without effect" and the like, caused by removing the
9850 call node earlier than the warning is generated. */
9852 tree
9853 fold_call_expr (location_t loc, tree exp, bool ignore)
9855 tree ret = NULL_TREE;
9856 tree fndecl = get_callee_fndecl (exp);
9857 if (fndecl && fndecl_built_in_p (fndecl)
9858 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9859 yet. Defer folding until we see all the arguments
9860 (after inlining). */
9861 && !CALL_EXPR_VA_ARG_PACK (exp))
9863 int nargs = call_expr_nargs (exp);
9865 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9866 instead the last argument is __builtin_va_arg_pack (). Defer folding
9867 even in that case, until arguments are finalized. */
9868 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9870 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9871 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9872 return NULL_TREE;
9875 if (avoid_folding_inline_builtin (fndecl))
9876 return NULL_TREE;
9878 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9879 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9880 CALL_EXPR_ARGP (exp), ignore);
9881 else
9883 tree *args = CALL_EXPR_ARGP (exp);
9884 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9885 if (ret)
9886 return ret;
9889 return NULL_TREE;
9892 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9893 N arguments are passed in the array ARGARRAY. Return a folded
9894 expression or NULL_TREE if no simplification was possible. */
9896 tree
9897 fold_builtin_call_array (location_t loc, tree,
9898 tree fn,
9899 int n,
9900 tree *argarray)
9902 if (TREE_CODE (fn) != ADDR_EXPR)
9903 return NULL_TREE;
9905 tree fndecl = TREE_OPERAND (fn, 0);
9906 if (TREE_CODE (fndecl) == FUNCTION_DECL
9907 && fndecl_built_in_p (fndecl))
9909 /* If last argument is __builtin_va_arg_pack (), arguments to this
9910 function are not finalized yet. Defer folding until they are. */
9911 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9913 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9914 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9915 return NULL_TREE;
9917 if (avoid_folding_inline_builtin (fndecl))
9918 return NULL_TREE;
9919 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9920 return targetm.fold_builtin (fndecl, n, argarray, false);
9921 else
9922 return fold_builtin_n (loc, fndecl, argarray, n, false);
9925 return NULL_TREE;
9928 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9929 along with N new arguments specified as the "..." parameters. SKIP
9930 is the number of arguments in EXP to be omitted. This function is used
9931 to do varargs-to-varargs transformations. */
9933 static tree
9934 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9936 va_list ap;
9937 tree t;
9939 va_start (ap, n);
9940 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9941 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9942 va_end (ap);
9944 return t;
9947 /* Validate a single argument ARG against a tree code CODE representing
9948 a type. Return true when the argument is valid. */
9950 static bool
9951 validate_arg (const_tree arg, enum tree_code code)
9953 if (!arg)
9954 return false;
9955 else if (code == POINTER_TYPE)
9956 return POINTER_TYPE_P (TREE_TYPE (arg));
9957 else if (code == INTEGER_TYPE)
9958 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9959 return code == TREE_CODE (TREE_TYPE (arg));
9962 /* This function validates the types of a function call argument list
9963 against a specified list of tree_codes. If the last specifier is a 0,
9964 that represents an ellipsis; otherwise the last specifier must be a
9965 VOID_TYPE.
9967 This is the GIMPLE version of validate_arglist. Eventually we want to
9968 completely convert builtins.c to work from GIMPLEs and the tree based
9969 validate_arglist will then be removed. */
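/* Illustrative use:
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly a (real, pointer) argument list; writing 0 instead
   of the trailing VOID_TYPE would additionally accept any further
   arguments.  */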
9971 bool
9972 validate_gimple_arglist (const gcall *call, ...)
9974 enum tree_code code;
9975 bool res = false;
9976 va_list ap;
9977 const_tree arg;
9978 size_t i;
9980 va_start (ap, call);
9981 i = 0;
9985 code = (enum tree_code) va_arg (ap, int);
9986 switch (code)
9988 case 0:
9989 /* This signifies an ellipsis; any further arguments are all ok. */
9990 res = true;
9991 goto end;
9992 case VOID_TYPE:
9993 /* This signifies an endlink: if no arguments remain, return
9994 true, otherwise return false. */
9995 res = (i == gimple_call_num_args (call));
9996 goto end;
9997 default:
9998 /* If no parameters remain or the parameter's code does not
9999 match the specified code, return false. Otherwise continue
10000 checking any remaining arguments. */
10001 arg = gimple_call_arg (call, i++);
10002 if (!validate_arg (arg, code))
10003 goto end;
10004 break;
10007 while (1);
10009 /* We need gotos here since we can only have one VA_CLOSE in a
10010 function. */
10011 end: ;
10012 va_end (ap);
10014 return res;
10017 /* Default target-specific builtin expander that does nothing. */
10019 rtx
10020 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10021 rtx target ATTRIBUTE_UNUSED,
10022 rtx subtarget ATTRIBUTE_UNUSED,
10023 machine_mode mode ATTRIBUTE_UNUSED,
10024 int ignore ATTRIBUTE_UNUSED)
10026 return NULL_RTX;
10029 /* Returns true if EXP represents data that would potentially reside
10030 in a readonly section. */
10032 bool
10033 readonly_data_expr (tree exp)
10035 STRIP_NOPS (exp);
10037 if (TREE_CODE (exp) != ADDR_EXPR)
10038 return false;
10040 exp = get_base_address (TREE_OPERAND (exp, 0));
10041 if (!exp)
10042 return false;
10044 /* Make sure we call decl_readonly_section only for trees it
10045 can handle (since it returns true for everything it doesn't
10046 understand). */
10047 if (TREE_CODE (exp) == STRING_CST
10048 || TREE_CODE (exp) == CONSTRUCTOR
10049 || (VAR_P (exp) && TREE_STATIC (exp)))
10050 return decl_readonly_section (exp, 0);
10051 else
10052 return false;
10055 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10056 to the call, and TYPE is its return type.
10058 Return NULL_TREE if no simplification was possible, otherwise return the
10059 simplified form of the call as a tree.
10061 The simplified form may be a constant or other expression which
10062 computes the same value, but in a more efficient manner (including
10063 calls to other builtin functions).
10065 The call may contain arguments which need to be evaluated, but
10066 which are not useful to determine the result of the call. In
10067 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10068 COMPOUND_EXPR will be an argument which must be evaluated.
10069 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10070 COMPOUND_EXPR in the chain will contain the tree for the simplified
10071 form of the builtin function call. */
10073 static tree
10074 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10076 if (!validate_arg (s1, POINTER_TYPE)
10077 || !validate_arg (s2, POINTER_TYPE))
10078 return NULL_TREE;
10079 else
10081 tree fn;
10082 const char *p1, *p2;
10084 p2 = c_getstr (s2);
10085 if (p2 == NULL)
10086 return NULL_TREE;
10088 p1 = c_getstr (s1);
10089 if (p1 != NULL)
10091 const char *r = strpbrk (p1, p2);
10092 tree tem;
10094 if (r == NULL)
10095 return build_int_cst (TREE_TYPE (s1), 0);
10097 /* Return an offset into the constant string argument. */
10098 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10099 return fold_convert_loc (loc, type, tem);
10102 if (p2[0] == '\0')
10103 /* strpbrk(x, "") == NULL.
10104 Evaluate and ignore s1 in case it had side-effects. */
10105 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10107 if (p2[1] != '\0')
10108 return NULL_TREE; /* Really call strpbrk. */
10110 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10111 if (!fn)
10112 return NULL_TREE;
10114 /* New argument list transforming strpbrk(s1, s2) to
10115 strchr(s1, s2[0]). */
10116 return build_call_expr_loc (loc, fn, 2, s1,
10117 build_int_cst (integer_type_node, p2[0]));
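/* Illustrative results: strpbrk (s, "") evaluates s and yields NULL;
   strpbrk (s, "/") becomes strchr (s, '/'); with both arguments
   constant, e.g. strpbrk ("abc", "bc"), the result is the compile-time
   offset "abc" + 1.  */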
10121 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10122 to the call.
10124 Return NULL_TREE if no simplification was possible, otherwise return the
10125 simplified form of the call as a tree.
10127 The simplified form may be a constant or other expression which
10128 computes the same value, but in a more efficient manner (including
10129 calls to other builtin functions).
10131 The call may contain arguments which need to be evaluated, but
10132 which are not useful to determine the result of the call. In
10133 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10134 COMPOUND_EXPR will be an argument which must be evaluated.
10135 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10136 COMPOUND_EXPR in the chain will contain the tree for the simplified
10137 form of the builtin function call. */
10139 static tree
10140 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10142 if (!validate_arg (s1, POINTER_TYPE)
10143 || !validate_arg (s2, POINTER_TYPE))
10144 return NULL_TREE;
10145 else
10147 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10149 /* If either argument is "", strspn returns 0. */
10150 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10151 /* Evaluate and ignore both arguments in case either one has
10152 side-effects. */
10153 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10154 s1, s2);
10155 return NULL_TREE;
10159 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10160 to the call.
10162 Return NULL_TREE if no simplification was possible, otherwise return the
10163 simplified form of the call as a tree.
10165 The simplified form may be a constant or other expression which
10166 computes the same value, but in a more efficient manner (including
10167 calls to other builtin functions).
10169 The call may contain arguments which need to be evaluated, but
10170 which are not useful to determine the result of the call. In
10171 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10172 COMPOUND_EXPR will be an argument which must be evaluated.
10173 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10174 COMPOUND_EXPR in the chain will contain the tree for the simplified
10175 form of the builtin function call. */
10177 static tree
10178 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10180 if (!validate_arg (s1, POINTER_TYPE)
10181 || !validate_arg (s2, POINTER_TYPE))
10182 return NULL_TREE;
10183 else
10185 /* If the first argument is "", strcspn returns 0. */
10186 const char *p1 = c_getstr (s1);
10187 if (p1 && *p1 == '\0')
10189 /* Evaluate and ignore argument s2 in case it has
10190 side-effects. */
10191 return omit_one_operand_loc (loc, size_type_node,
10192 size_zero_node, s2);
10195 /* If the second argument is "", return __builtin_strlen(s1). */
10196 const char *p2 = c_getstr (s2);
10197 if (p2 && *p2 == '\0')
10199 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10201 /* If the replacement _DECL isn't initialized, don't do the
10202 transformation. */
10203 if (!fn)
10204 return NULL_TREE;
10206 return build_call_expr_loc (loc, fn, 1, s1);
10208 return NULL_TREE;
10212 /* Fold the next_arg or va_start call EXP. Returns true if an error
10213 was produced, false otherwise. This is done so that we don't output
10214 the error or warning two or three times. */
10216 bool
10217 fold_builtin_next_arg (tree exp, bool va_start_p)
10219 tree fntype = TREE_TYPE (current_function_decl);
10220 int nargs = call_expr_nargs (exp);
10221 tree arg;
10222 /* There is a good chance the current input_location points inside the
10223 definition of the va_start macro (perhaps on the token for the
10224 builtin) in a system header, so warnings will not be emitted.
10225 Use the location in real source code. */
10226 location_t current_location =
10227 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10228 NULL);
10230 if (!stdarg_p (fntype))
10232 error ("%<va_start%> used in function with fixed arguments");
10233 return true;
10236 if (va_start_p)
10238 if (va_start_p && (nargs != 2))
10240 error ("wrong number of arguments to function %<va_start%>");
10241 return true;
10243 arg = CALL_EXPR_ARG (exp, 1);
10245 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10246 when we checked the arguments and if needed issued a warning. */
10247 else
10249 if (nargs == 0)
10251 /* Evidently an out of date version of <stdarg.h>; can't validate
10252 va_start's second argument, but can still work as intended. */
10253 warning_at (current_location,
10254 OPT_Wvarargs,
10255 "%<__builtin_next_arg%> called without an argument");
10256 return true;
10258 else if (nargs > 1)
10260 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10261 return true;
10263 arg = CALL_EXPR_ARG (exp, 0);
10266 if (TREE_CODE (arg) == SSA_NAME)
10267 arg = SSA_NAME_VAR (arg);
10269 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10270 or __builtin_next_arg (0) the first time we see it, after checking
10271 the arguments and if needed issuing a warning. */
10272 if (!integer_zerop (arg))
10274 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10276 /* Strip off all nops for the sake of the comparison. This
10277 is not quite the same as STRIP_NOPS. It does more.
10278 We must also strip off INDIRECT_EXPR for C++ reference
10279 parameters. */
10280 while (CONVERT_EXPR_P (arg)
10281 || TREE_CODE (arg) == INDIRECT_REF)
10282 arg = TREE_OPERAND (arg, 0);
10283 if (arg != last_parm)
10285 /* FIXME: Sometimes the tree optimizers hand us something other
10286 than the last argument even though the user did use the last
10287 argument. We just warn and treat the arg as the last
10288 argument, so we may generate wrong code because of
10289 it. */
10290 warning_at (current_location,
10291 OPT_Wvarargs,
10292 "second parameter of %<va_start%> not last named argument");
10295 /* Undefined by C99 7.15.1.4p4 (va_start):
10296 "If the parameter parmN is declared with the register storage
10297 class, with a function or array type, or with a type that is
10298 not compatible with the type that results after application of
10299 the default argument promotions, the behavior is undefined."
10301 else if (DECL_REGISTER (arg))
10303 warning_at (current_location,
10304 OPT_Wvarargs,
10305 "undefined behavior when second parameter of "
10306 "%<va_start%> is declared with %<register%> storage");
10309 /* We want to verify the second parameter just once before the tree
10310 optimizers are run and then avoid keeping it in the tree,
10311 as otherwise we could warn even for correct code like:
10312 void foo (int i, ...)
10313 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10314 if (va_start_p)
10315 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10316 else
10317 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10319 return false;
10323 /* Expand a call EXP to __builtin_object_size. */
10325 static rtx
10326 expand_builtin_object_size (tree exp)
10328 tree ost;
10329 int object_size_type;
10330 tree fndecl = get_callee_fndecl (exp);
10332 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10334 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10335 exp, fndecl);
10336 expand_builtin_trap ();
10337 return const0_rtx;
10340 ost = CALL_EXPR_ARG (exp, 1);
10341 STRIP_NOPS (ost);
10343 if (TREE_CODE (ost) != INTEGER_CST
10344 || tree_int_cst_sgn (ost) < 0
10345 || compare_tree_int (ost, 3) > 0)
10347 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10348 exp, fndecl);
10349 expand_builtin_trap ();
10350 return const0_rtx;
10353 object_size_type = tree_to_shwi (ost);
10355 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10358 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10359 FCODE is the BUILT_IN_* to use.
10360 Return NULL_RTX if we failed; the caller should emit a normal call,
10361 otherwise try to get the result in TARGET, if convenient (and in
10362 mode MODE if that's convenient). */
10364 static rtx
10365 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10366 enum built_in_function fcode)
10368 if (!validate_arglist (exp,
10369 POINTER_TYPE,
10370 fcode == BUILT_IN_MEMSET_CHK
10371 ? INTEGER_TYPE : POINTER_TYPE,
10372 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10373 return NULL_RTX;
10375 tree dest = CALL_EXPR_ARG (exp, 0);
10376 tree src = CALL_EXPR_ARG (exp, 1);
10377 tree len = CALL_EXPR_ARG (exp, 2);
10378 tree size = CALL_EXPR_ARG (exp, 3);
10380 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10381 /*str=*/NULL_TREE, size);
10383 if (!tree_fits_uhwi_p (size))
10384 return NULL_RTX;
10386 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10388 /* Avoid transforming the checking call to an ordinary one when
10389 an overflow has been detected or when the call couldn't be
10390 validated because the size is not constant. */
10391 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10392 return NULL_RTX;
10394 tree fn = NULL_TREE;
10395 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10396 mem{cpy,pcpy,move,set} is available. */
10397 switch (fcode)
10399 case BUILT_IN_MEMCPY_CHK:
10400 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10401 break;
10402 case BUILT_IN_MEMPCPY_CHK:
10403 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10404 break;
10405 case BUILT_IN_MEMMOVE_CHK:
10406 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10407 break;
10408 case BUILT_IN_MEMSET_CHK:
10409 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10410 break;
10411 default:
10412 break;
10415 if (! fn)
10416 return NULL_RTX;
10418 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10419 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10420 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10421 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10423 else if (fcode == BUILT_IN_MEMSET_CHK)
10424 return NULL_RTX;
10425 else
10427 unsigned int dest_align = get_pointer_alignment (dest);
10429 /* If DEST is not a pointer type, call the normal function. */
10430 if (dest_align == 0)
10431 return NULL_RTX;
10433 /* If SRC and DEST are the same (and not volatile), do nothing. */
10434 if (operand_equal_p (src, dest, 0))
10436 tree expr;
10438 if (fcode != BUILT_IN_MEMPCPY_CHK)
10440 /* Evaluate and ignore LEN in case it has side-effects. */
10441 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10442 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10445 expr = fold_build_pointer_plus (dest, len);
10446 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10449 /* __memmove_chk special case. */
10450 if (fcode == BUILT_IN_MEMMOVE_CHK)
10452 unsigned int src_align = get_pointer_alignment (src);
10454 if (src_align == 0)
10455 return NULL_RTX;
10457 /* If src is categorized for a readonly section we can use
10458 normal __memcpy_chk. */
10459 if (readonly_data_expr (src))
10461 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10462 if (!fn)
10463 return NULL_RTX;
10464 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10465 dest, src, len, size);
10466 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10467 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10468 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10471 return NULL_RTX;
10475 /* Emit warning if a buffer overflow is detected at compile time. */
10477 static void
10478 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10480 /* The source string. */
10481 tree srcstr = NULL_TREE;
10482 /* The size of the destination object. */
10483 tree objsize = NULL_TREE;
10484 /* The string being appended to (as in __strcat_chk), or null if
10485 the call isn't a concatenation. */
10486 tree catstr = NULL_TREE;
10487 /* The maximum length of the source sequence in a bounded operation
10488 (such as __strncat_chk) or null if the operation isn't bounded
10489 (such as __strcat_chk). */
10490 tree maxread = NULL_TREE;
10491 /* The exact size of the access (such as in __strncpy_chk). */
10492 tree size = NULL_TREE;
10494 switch (fcode)
10496 case BUILT_IN_STRCPY_CHK:
10497 case BUILT_IN_STPCPY_CHK:
10498 srcstr = CALL_EXPR_ARG (exp, 1);
10499 objsize = CALL_EXPR_ARG (exp, 2);
10500 break;
10502 case BUILT_IN_STRCAT_CHK:
10503 /* For __strcat_chk the warning will be emitted only if overflowing
10504 by at least strlen (dest) + 1 bytes. */
10505 catstr = CALL_EXPR_ARG (exp, 0);
10506 srcstr = CALL_EXPR_ARG (exp, 1);
10507 objsize = CALL_EXPR_ARG (exp, 2);
10508 break;
10510 case BUILT_IN_STRNCAT_CHK:
10511 catstr = CALL_EXPR_ARG (exp, 0);
10512 srcstr = CALL_EXPR_ARG (exp, 1);
10513 maxread = CALL_EXPR_ARG (exp, 2);
10514 objsize = CALL_EXPR_ARG (exp, 3);
10515 break;
10517 case BUILT_IN_STRNCPY_CHK:
10518 case BUILT_IN_STPNCPY_CHK:
10519 srcstr = CALL_EXPR_ARG (exp, 1);
10520 size = CALL_EXPR_ARG (exp, 2);
10521 objsize = CALL_EXPR_ARG (exp, 3);
10522 break;
10524 case BUILT_IN_SNPRINTF_CHK:
10525 case BUILT_IN_VSNPRINTF_CHK:
10526 maxread = CALL_EXPR_ARG (exp, 1);
10527 objsize = CALL_EXPR_ARG (exp, 3);
10528 break;
10529 default:
10530 gcc_unreachable ();
10533 if (catstr && maxread)
10535 /* Check __strncat_chk. There is no way to determine the length
10536 of the string to which the source string is being appended so
10537 just warn when the length of the source string is not known. */
10538 check_strncat_sizes (exp, objsize);
10539 return;
10542 /* The destination argument is the first one for all built-ins above. */
10543 tree dst = CALL_EXPR_ARG (exp, 0);
10545 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10548 /* Emit warning if a buffer overflow is detected at compile time
10549 in __sprintf_chk/__vsprintf_chk calls. */
10551 static void
10552 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10554 tree size, len, fmt;
10555 const char *fmt_str;
10556 int nargs = call_expr_nargs (exp);
10558 /* Verify the required arguments in the original call. */
10560 if (nargs < 4)
10561 return;
10562 size = CALL_EXPR_ARG (exp, 2);
10563 fmt = CALL_EXPR_ARG (exp, 3);
10565 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10566 return;
10568 /* Check whether the format is a literal string constant. */
10569 fmt_str = c_getstr (fmt);
10570 if (fmt_str == NULL)
10571 return;
10573 if (!init_target_chars ())
10574 return;
10576 /* If the format doesn't contain % args or %%, we know its size. */
10577 if (strchr (fmt_str, target_percent) == 0)
10578 len = build_int_cstu (size_type_node, strlen (fmt_str));
10579 /* If the format is "%s" and first ... argument is a string literal,
10580 we know it too. */
10581 else if (fcode == BUILT_IN_SPRINTF_CHK
10582 && strcmp (fmt_str, target_percent_s) == 0)
10584 tree arg;
10586 if (nargs < 5)
10587 return;
10588 arg = CALL_EXPR_ARG (exp, 4);
10589 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10590 return;
10592 len = c_strlen (arg, 1);
10593 if (!len || ! tree_fits_uhwi_p (len))
10594 return;
10596 else
10597 return;
10599 /* Add one for the terminating nul. */
10600 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10602 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10603 /*maxread=*/NULL_TREE, len, size);
10606 /* Emit a warning if free is called with the address of a variable. */
10608 static void
10609 maybe_emit_free_warning (tree exp)
10611 if (call_expr_nargs (exp) != 1)
10612 return;
10614 tree arg = CALL_EXPR_ARG (exp, 0);
10616 STRIP_NOPS (arg);
10617 if (TREE_CODE (arg) != ADDR_EXPR)
10618 return;
10620 arg = get_base_address (TREE_OPERAND (arg, 0));
10621 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10622 return;
10624 if (SSA_VAR_P (arg))
10625 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10626 "%Kattempt to free a non-heap object %qD", exp, arg);
10627 else
10628 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10629 "%Kattempt to free a non-heap object", exp);
10632 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10633 if possible. */
10635 static tree
10636 fold_builtin_object_size (tree ptr, tree ost)
10638 unsigned HOST_WIDE_INT bytes;
10639 int object_size_type;
10641 if (!validate_arg (ptr, POINTER_TYPE)
10642 || !validate_arg (ost, INTEGER_TYPE))
10643 return NULL_TREE;
10645 STRIP_NOPS (ost);
10647 if (TREE_CODE (ost) != INTEGER_CST
10648 || tree_int_cst_sgn (ost) < 0
10649 || compare_tree_int (ost, 3) > 0)
10650 return NULL_TREE;
10652 object_size_type = tree_to_shwi (ost);
10654 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10655 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10656 and (size_t) 0 for types 2 and 3. */
10657 if (TREE_SIDE_EFFECTS (ptr))
10658 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
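/* Illustrative examples: __builtin_object_size (p++, 0) folds to
   (size_t) -1 and __builtin_object_size (p++, 2) folds to (size_t) 0;
   the side-effecting argument is not evaluated.  */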
10660 if (TREE_CODE (ptr) == ADDR_EXPR)
10662 compute_builtin_object_size (ptr, object_size_type, &bytes);
10663 if (wi::fits_to_tree_p (bytes, size_type_node))
10664 return build_int_cstu (size_type_node, bytes);
10666 else if (TREE_CODE (ptr) == SSA_NAME)
10668 /* If object size is not known yet, delay folding until
10669 later. Maybe subsequent passes will help determine
10670 it. */
10671 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10672 && wi::fits_to_tree_p (bytes, size_type_node))
10673 return build_int_cstu (size_type_node, bytes);
10676 return NULL_TREE;
10679 /* Builtins with folding operations that operate on "..." arguments
10680 need special handling; we need to store the arguments in a convenient
10681 data structure before attempting any folding. Fortunately there are
10682 only a few builtins that fall into this category. FNDECL is the
10683 function and ARGS holds its NARGS arguments. */
10685 static tree
10686 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10688 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10689 tree ret = NULL_TREE;
10691 switch (fcode)
10693 case BUILT_IN_FPCLASSIFY:
10694 ret = fold_builtin_fpclassify (loc, args, nargs);
10695 break;
10697 default:
10698 break;
10700 if (ret)
10702 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10703 SET_EXPR_LOCATION (ret, loc);
10704 TREE_NO_WARNING (ret) = 1;
10705 return ret;
10707 return NULL_TREE;

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
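
/* Illustrative only (not part of the original sources): after a
   successful init_target_chars, the composed buffers spell the format
   fragments used by the printf/scanf folders in the *target* character
   set.  On an ASCII target they read:

     target_percent_c         == "%c"
     target_percent_s         == "%s"
     target_percent_s_newline == "%s\n"
*/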

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
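
/* Illustrative only (not part of the original sources): a sketch of why
   the checks above matter.  MPFR's exponent range is far wider than any
   hardware format, so e.g. evaluating exp(800) yields a finite MPFR
   number even though the value overflows IEEE double; the
   real_isfinite/real_identical tests then reject the fold and the call
   is left for the runtime library to handle.  */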

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, reduce the quo value modulo the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
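
/* Illustrative only (not part of the original sources): a worked
   instance of the folding above.  For constant operands

     int q;
     double r = __builtin_remquo (7.0, 3.0, &q);

   7.0/3.0 rounds to the nearest integer 2, so the call folds to the
   compound expression (q = 2, 1.0), the remainder being
   7.0 - 2*3.0 == 1.0.  */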

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
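
/* Illustrative only (not part of the original sources): a worked
   instance of the folding above.  For a constant argument

     int sg;
     double r = __builtin_lgamma_r (0.5, &sg);

   gamma(0.5) == sqrt(pi) is positive, so the call folds to the compound
   expression (sg = 1, 0.57236494...), i.e. log(sqrt(pi)).  */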

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
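
/* Illustrative only (not part of the original sources): do_mpc_arg2
   backs the constant folding of two-argument complex builtins such as
   cpow.  With constant operands,

     _Complex double z = __builtin_cpow (2.0 + 0.0i, 2.0 + 0.0i);

   is evaluated by MPC at the precision of the target type and folds to
   4.0, provided do_mpc_ckconv accepts the result (finite and
   representable in the target mode).  */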

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
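
/* Illustrative only (not part of the original sources): the hook above
   is reached when the user renames a builtin at the assembler level,
   e.g.

     extern int ffs (int) __asm__ ("my_ffs");

   after which calls that GCC emits through the builtin machinery (and,
   when INT_TYPE_SIZE < BITS_PER_WORD, through the ffs optab libfunc)
   resolve to "my_ffs" rather than "ffs".  */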

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e. one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
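
/* Illustrative only (not part of the original sources): target_char_cst_p
   is what lets string folders treat a character argument as one known
   byte.  When folding strchr (s, 'a') on a target whose char width
   matches the host's, the INTEGER_CST for 'a' passes the check and *P
   receives 'a'; on a cross compiler with a differently sized target
   char the fold is refused.  */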