/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
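/* For instance, is_builtin_name returns true for "__builtin_memcpy" or
   "__atomic_load_n", but false for a plain "memcpy".  */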
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
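/* As a worked example of the M/N contract above: for an access to buf[2]
   where buf is a 16-byte aligned object, the call sets *ALIGNP to 128
   (bits) and *BITPOSP to 16, i.e. the address is known to be exactly two
   bytes past a 128-bit boundary.  */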
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
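/* E.g. with align == 128 and bitpos == 16 as in the example above,
   least_bit_hwi (16) yields 16, so the object itself is only guaranteed
   to be 16-bit (2-byte) aligned.  */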
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
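/* For example, string_length ("ab\0cd", 1, 5) returns 2.  MAXELTS caps
   the scan, so string_length ("abcd", 1, 2) returns 2 even though no NUL
   was seen.  */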
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	TREE_NO_WARNING (src) = 1;
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
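/* A few illustrative cases of the above: for "hello" c_strlen yields
   ssize_int (5); for "foo" at a known byte offset of 1 it yields 2; and
   for "foo\0bar" at a non-constant offset it yields NULL_TREE, since the
   embedded NUL makes the length depend on where the search starts.  */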
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
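/* For example, reading "ab" in SImode on a little-endian target with
   8-bit units yields the constant 0x00006261 ('a' in the low byte), while
   a big-endian target yields 0x61620000; the NUL terminator and everything
   after it read as zero when NULL_TERMINATED_P.  */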
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
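/* E.g. for a CST of 65 this stores 'A' (0x41) in *P and returns 0; it
   returns 1 (failure) when CST is not an INTEGER_CST or when the target
   char value, truncated to CHAR_TYPE_SIZE bits, cannot be represented in
   a host char.  */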
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
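/* A typical use, as seen in the function below, is
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   which accepts exactly two pointer arguments, rejecting literal null
   pointers for parameters declared nonnull.  */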
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
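/* For example, a call such as __builtin_prefetch (&a[i + 8], 0, 3) expands
   to the target's prefetch pattern (a read prefetch with maximum temporal
   locality) when one exists, and is otherwise dropped after evaluating any
   side effects in the address.  */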
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
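/* The resulting block layout is therefore: the incoming arg pointer, then
   (possibly) the structure value address, then one suitably aligned slot
   per argument register.  On a hypothetical target with a 4-byte Pmode,
   two 4-byte argument registers and no structure value address, for
   instance, the size would come to 12 bytes.  */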
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1666 /* __builtin_apply_args returns block of memory allocated on
1667 the stack into which is stored the arg pointer, structure
1668 value address, static chain, and all the registers that might
1669 possibly be used in performing a function call. The code is
1670 moved to the start of the function so the incoming values are
1671 saved. */
1673 static rtx
1674 expand_builtin_apply_args (void)
1676 /* Don't do __builtin_apply_args more than once in a function.
1677 Save the result of the first call and reuse it. */
1678 if (apply_args_value != 0)
1679 return apply_args_value;
1681 /* When this function is called, it means that registers must be
1682 saved on entry to this function. So we migrate the
1683 call to the first insn of this function. */
1684 rtx temp;
1686 start_sequence ();
1687 temp = expand_builtin_apply_args_1 ();
1688 rtx_insn *seq = get_insns ();
1689 end_sequence ();
1691 apply_args_value = temp;
1693 /* Put the insns after the NOTE that starts the function.
1694 If this is inside a start_sequence, make the outer-level insn
1695 chain current, so the code is placed at the start of the
1696 function. If internal_arg_pointer is a non-virtual pseudo,
1697 it needs to be placed after the insn that initializes
1698 that pseudo. */
1699 push_topmost_sequence ();
1700 if (REG_P (crtl->args.internal_arg_pointer)
1701 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1702 emit_insn_before (seq, parm_birth_insn);
1703 else
1704 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1705 pop_topmost_sequence ();
1706 return temp;
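/* Illustrative sketch (added example, not part of the original source):
   at the user level this expander backs code such as the following,
   where the argument block captured on entry is later forwarded with
   __builtin_apply.  The callee `real_fn' and the block size 64 are
   hypothetical.

     void wrapper (int a, double b)
     {
       void *args = __builtin_apply_args ();
       __builtin_apply ((void (*) ()) real_fn, args, 64);
     }  */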
1710 /* Perform an untyped call and save the state required to perform an
1711 untyped return of whatever value was returned by the given function. */
1713 static rtx
1714 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1716 int size, align, regno;
1717 fixed_size_mode mode;
1718 rtx incoming_args, result, reg, dest, src;
1719 rtx_call_insn *call_insn;
1720 rtx old_stack_level = 0;
1721 rtx call_fusage = 0;
1722 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1724 arguments = convert_memory_address (Pmode, arguments);
1726 /* Create a block where the return registers can be saved. */
1727 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1729 /* Fetch the arg pointer from the ARGUMENTS block. */
1730 incoming_args = gen_reg_rtx (Pmode);
1731 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1732 if (!STACK_GROWS_DOWNWARD)
1733 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1734 incoming_args, 0, OPTAB_LIB_WIDEN);
1736 /* Push a new argument block and copy the arguments. Do not allow
1737 the (potential) memcpy call below to interfere with our stack
1738 manipulations. */
1739 do_pending_stack_adjust ();
1740 NO_DEFER_POP;
1742 /* Save the stack with nonlocal if available. */
1743 if (targetm.have_save_stack_nonlocal ())
1744 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1745 else
1746 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1748 /* Allocate a block of memory onto the stack and copy the memory
1749 arguments to the outgoing arguments address. We can pass TRUE
1750 as the 4th argument because we just saved the stack pointer
1751 and will restore it right after the call. */
1752 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1754 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1755 may have already set current_function_calls_alloca to true.
1756 current_function_calls_alloca won't be set if argsize is zero,
1757 so we have to guarantee need_drap is true here. */
1758 if (SUPPORTS_STACK_ALIGNMENT)
1759 crtl->need_drap = true;
1761 dest = virtual_outgoing_args_rtx;
1762 if (!STACK_GROWS_DOWNWARD)
1764 if (CONST_INT_P (argsize))
1765 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1766 else
1767 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1769 dest = gen_rtx_MEM (BLKmode, dest);
1770 set_mem_align (dest, PARM_BOUNDARY);
1771 src = gen_rtx_MEM (BLKmode, incoming_args);
1772 set_mem_align (src, PARM_BOUNDARY);
1773 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1775 /* Refer to the argument block. */
1776 apply_args_size ();
1777 arguments = gen_rtx_MEM (BLKmode, arguments);
1778 set_mem_align (arguments, PARM_BOUNDARY);
1780 /* Walk past the arg-pointer and structure value address. */
1781 size = GET_MODE_SIZE (Pmode);
1782 if (struct_value)
1783 size += GET_MODE_SIZE (Pmode);
1785 /* Restore each of the registers previously saved. Make USE insns
1786 for each of these registers for use in making the call. */
1787 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1788 if ((mode = apply_args_mode[regno]) != VOIDmode)
1790 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1791 if (size % align != 0)
1792 size = CEIL (size, align) * align;
1793 reg = gen_rtx_REG (mode, regno);
1794 emit_move_insn (reg, adjust_address (arguments, mode, size));
1795 use_reg (&call_fusage, reg);
1796 size += GET_MODE_SIZE (mode);
1799 /* Restore the structure value address unless this is passed as an
1800 "invisible" first argument. */
1801 size = GET_MODE_SIZE (Pmode);
1802 if (struct_value)
1804 rtx value = gen_reg_rtx (Pmode);
1805 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1806 emit_move_insn (struct_value, value);
1807 if (REG_P (struct_value))
1808 use_reg (&call_fusage, struct_value);
1809 size += GET_MODE_SIZE (Pmode);
1812 /* All arguments and registers used for the call are set up by now! */
1813 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1815 /* Ensure the address is valid. A SYMBOL_REF is already valid, and we
1816 don't want to load it into a register as an optimization, because
1817 prepare_call_address already did that if it should be done. */
1818 if (GET_CODE (function) != SYMBOL_REF)
1819 function = memory_address (FUNCTION_MODE, function);
1821 /* Generate the actual call instruction and save the return value. */
1822 if (targetm.have_untyped_call ())
1824 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1825 emit_call_insn (targetm.gen_untyped_call (mem, result,
1826 result_vector (1, result)));
1828 else if (targetm.have_call_value ())
1830 rtx valreg = 0;
1832 /* Locate the unique return register. It is not possible to
1833 express a call that sets more than one return register using
1834 call_value; use untyped_call for that. In fact, untyped_call
1835 only needs to save the return registers in the given block. */
1836 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 if ((mode = apply_result_mode[regno]) != VOIDmode)
1839 gcc_assert (!valreg); /* have_untyped_call required. */
1841 valreg = gen_rtx_REG (mode, regno);
1844 emit_insn (targetm.gen_call_value (valreg,
1845 gen_rtx_MEM (FUNCTION_MODE, function),
1846 const0_rtx, NULL_RTX, const0_rtx));
1848 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1850 else
1851 gcc_unreachable ();
1853 /* Find the CALL insn we just emitted, and attach the register usage
1854 information. */
1855 call_insn = last_call_insn ();
1856 add_function_usage_to (call_insn, call_fusage);
1858 /* Restore the stack. */
1859 if (targetm.have_save_stack_nonlocal ())
1860 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1861 else
1862 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1863 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1865 OK_DEFER_POP;
1867 /* Return the address of the result block. */
1868 result = copy_addr_to_reg (XEXP (result, 0));
1869 return convert_memory_address (ptr_mode, result);
1872 /* Perform an untyped return. */
1874 static void
1875 expand_builtin_return (rtx result)
1877 int size, align, regno;
1878 fixed_size_mode mode;
1879 rtx reg;
1880 rtx_insn *call_fusage = 0;
1882 result = convert_memory_address (Pmode, result);
1884 apply_result_size ();
1885 result = gen_rtx_MEM (BLKmode, result);
1887 if (targetm.have_untyped_return ())
1889 rtx vector = result_vector (0, result);
1890 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1891 emit_barrier ();
1892 return;
1895 /* Restore the return value and note that each value is used. */
1896 size = 0;
1897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1898 if ((mode = apply_result_mode[regno]) != VOIDmode)
1900 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1901 if (size % align != 0)
1902 size = CEIL (size, align) * align;
1903 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1904 emit_move_insn (reg, adjust_address (result, mode, size));
1906 push_to_sequence (call_fusage);
1907 emit_use (reg);
1908 call_fusage = get_insns ();
1909 end_sequence ();
1910 size += GET_MODE_SIZE (mode);
1913 /* Put the USE insns before the return. */
1914 emit_insn (call_fusage);
1916 /* Return whatever values were restored by jumping directly to the end
1917 of the function. */
1918 expand_naked_return ();
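/* Illustrative sketch (added example, not part of the original source):
   the three untyped builtins combine into a complete forwarder; the
   traced function `fn' and the argument block size 16 are hypothetical.

     void forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) fn, args, 16);
       __builtin_return (ret);
     }  */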
1921 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1923 static enum type_class
1924 type_to_class (tree type)
1926 switch (TREE_CODE (type))
1928 case VOID_TYPE: return void_type_class;
1929 case INTEGER_TYPE: return integer_type_class;
1930 case ENUMERAL_TYPE: return enumeral_type_class;
1931 case BOOLEAN_TYPE: return boolean_type_class;
1932 case POINTER_TYPE: return pointer_type_class;
1933 case REFERENCE_TYPE: return reference_type_class;
1934 case OFFSET_TYPE: return offset_type_class;
1935 case REAL_TYPE: return real_type_class;
1936 case COMPLEX_TYPE: return complex_type_class;
1937 case FUNCTION_TYPE: return function_type_class;
1938 case METHOD_TYPE: return method_type_class;
1939 case RECORD_TYPE: return record_type_class;
1940 case UNION_TYPE:
1941 case QUAL_UNION_TYPE: return union_type_class;
1942 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1943 ? string_type_class : array_type_class);
1944 case LANG_TYPE: return lang_type_class;
1945 default: return no_type_class;
1949 /* Expand a call EXP to __builtin_classify_type. */
1951 static rtx
1952 expand_builtin_classify_type (tree exp)
1954 if (call_expr_nargs (exp))
1955 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1956 return GEN_INT (no_type_class);
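/* Illustrative examples (added, not part of the original source),
   using the type_class values from typeclass.h:

     __builtin_classify_type (0)          => integer_type_class
     __builtin_classify_type (0.0)        => real_type_class
     __builtin_classify_type ((void *) 0) => pointer_type_class  */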
1959 /* This helper macro, meant to be used in mathfn_built_in below, determines
1960 which among a set of builtin math functions is appropriate for a given type
1961 mode. The `F' (float) and `L' (long double) variants are automatically
1962 generated from the 'double' case. If a function supports the _Float<N> and
1963 _Float<N>X types, additional variants with 'F32', 'F64',
1964 'F128', etc. suffixes are considered. */
1965 #define CASE_MATHFN(MATHFN) \
1966 CASE_CFN_##MATHFN: \
1967 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1968 fcodel = BUILT_IN_##MATHFN##L ; break;
1969 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1970 types. */
1971 #define CASE_MATHFN_FLOATN(MATHFN) \
1972 CASE_CFN_##MATHFN: \
1973 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1974 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1975 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1976 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1977 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1978 break;
1979 /* Similar to above, but appends _R after any F/L suffix. */
1980 #define CASE_MATHFN_REENT(MATHFN) \
1981 case CFN_BUILT_IN_##MATHFN##_R: \
1982 case CFN_BUILT_IN_##MATHFN##F_R: \
1983 case CFN_BUILT_IN_##MATHFN##L_R: \
1984 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1985 fcodel = BUILT_IN_##MATHFN##L_R ; break;
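/* For reference (added example, derived by hand from the definition
   above), CASE_MATHFN (ACOS) expands to:

     CASE_CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;  */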
1987 /* Return a function equivalent to FN but operating on floating-point
1988 values of type TYPE, or END_BUILTINS if no such function exists.
1989 This is purely an operation on function codes; it does not guarantee
1990 that the target actually has an implementation of the function. */
1992 static built_in_function
1993 mathfn_built_in_2 (tree type, combined_fn fn)
1995 tree mtype;
1996 built_in_function fcode, fcodef, fcodel;
1997 built_in_function fcodef16 = END_BUILTINS;
1998 built_in_function fcodef32 = END_BUILTINS;
1999 built_in_function fcodef64 = END_BUILTINS;
2000 built_in_function fcodef128 = END_BUILTINS;
2001 built_in_function fcodef32x = END_BUILTINS;
2002 built_in_function fcodef64x = END_BUILTINS;
2003 built_in_function fcodef128x = END_BUILTINS;
2005 switch (fn)
2007 CASE_MATHFN (ACOS)
2008 CASE_MATHFN (ACOSH)
2009 CASE_MATHFN (ASIN)
2010 CASE_MATHFN (ASINH)
2011 CASE_MATHFN (ATAN)
2012 CASE_MATHFN (ATAN2)
2013 CASE_MATHFN (ATANH)
2014 CASE_MATHFN (CBRT)
2015 CASE_MATHFN_FLOATN (CEIL)
2016 CASE_MATHFN (CEXPI)
2017 CASE_MATHFN_FLOATN (COPYSIGN)
2018 CASE_MATHFN (COS)
2019 CASE_MATHFN (COSH)
2020 CASE_MATHFN (DREM)
2021 CASE_MATHFN (ERF)
2022 CASE_MATHFN (ERFC)
2023 CASE_MATHFN (EXP)
2024 CASE_MATHFN (EXP10)
2025 CASE_MATHFN (EXP2)
2026 CASE_MATHFN (EXPM1)
2027 CASE_MATHFN (FABS)
2028 CASE_MATHFN (FDIM)
2029 CASE_MATHFN_FLOATN (FLOOR)
2030 CASE_MATHFN_FLOATN (FMA)
2031 CASE_MATHFN_FLOATN (FMAX)
2032 CASE_MATHFN_FLOATN (FMIN)
2033 CASE_MATHFN (FMOD)
2034 CASE_MATHFN (FREXP)
2035 CASE_MATHFN (GAMMA)
2036 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2037 CASE_MATHFN (HUGE_VAL)
2038 CASE_MATHFN (HYPOT)
2039 CASE_MATHFN (ILOGB)
2040 CASE_MATHFN (ICEIL)
2041 CASE_MATHFN (IFLOOR)
2042 CASE_MATHFN (INF)
2043 CASE_MATHFN (IRINT)
2044 CASE_MATHFN (IROUND)
2045 CASE_MATHFN (ISINF)
2046 CASE_MATHFN (J0)
2047 CASE_MATHFN (J1)
2048 CASE_MATHFN (JN)
2049 CASE_MATHFN (LCEIL)
2050 CASE_MATHFN (LDEXP)
2051 CASE_MATHFN (LFLOOR)
2052 CASE_MATHFN (LGAMMA)
2053 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2054 CASE_MATHFN (LLCEIL)
2055 CASE_MATHFN (LLFLOOR)
2056 CASE_MATHFN (LLRINT)
2057 CASE_MATHFN (LLROUND)
2058 CASE_MATHFN (LOG)
2059 CASE_MATHFN (LOG10)
2060 CASE_MATHFN (LOG1P)
2061 CASE_MATHFN (LOG2)
2062 CASE_MATHFN (LOGB)
2063 CASE_MATHFN (LRINT)
2064 CASE_MATHFN (LROUND)
2065 CASE_MATHFN (MODF)
2066 CASE_MATHFN (NAN)
2067 CASE_MATHFN (NANS)
2068 CASE_MATHFN_FLOATN (NEARBYINT)
2069 CASE_MATHFN (NEXTAFTER)
2070 CASE_MATHFN (NEXTTOWARD)
2071 CASE_MATHFN (POW)
2072 CASE_MATHFN (POWI)
2073 CASE_MATHFN (POW10)
2074 CASE_MATHFN (REMAINDER)
2075 CASE_MATHFN (REMQUO)
2076 CASE_MATHFN_FLOATN (RINT)
2077 CASE_MATHFN_FLOATN (ROUND)
2078 CASE_MATHFN (SCALB)
2079 CASE_MATHFN (SCALBLN)
2080 CASE_MATHFN (SCALBN)
2081 CASE_MATHFN (SIGNBIT)
2082 CASE_MATHFN (SIGNIFICAND)
2083 CASE_MATHFN (SIN)
2084 CASE_MATHFN (SINCOS)
2085 CASE_MATHFN (SINH)
2086 CASE_MATHFN_FLOATN (SQRT)
2087 CASE_MATHFN (TAN)
2088 CASE_MATHFN (TANH)
2089 CASE_MATHFN (TGAMMA)
2090 CASE_MATHFN_FLOATN (TRUNC)
2091 CASE_MATHFN (Y0)
2092 CASE_MATHFN (Y1)
2093 CASE_MATHFN (YN)
2095 default:
2096 return END_BUILTINS;
2099 mtype = TYPE_MAIN_VARIANT (type);
2100 if (mtype == double_type_node)
2101 return fcode;
2102 else if (mtype == float_type_node)
2103 return fcodef;
2104 else if (mtype == long_double_type_node)
2105 return fcodel;
2106 else if (mtype == float16_type_node)
2107 return fcodef16;
2108 else if (mtype == float32_type_node)
2109 return fcodef32;
2110 else if (mtype == float64_type_node)
2111 return fcodef64;
2112 else if (mtype == float128_type_node)
2113 return fcodef128;
2114 else if (mtype == float32x_type_node)
2115 return fcodef32x;
2116 else if (mtype == float64x_type_node)
2117 return fcodef64x;
2118 else if (mtype == float128x_type_node)
2119 return fcodef128x;
2120 else
2121 return END_BUILTINS;
2124 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2125 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2126 otherwise use the explicit declaration. If we can't do the conversion,
2127 return null. */
2129 static tree
2130 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2132 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2133 if (fcode2 == END_BUILTINS)
2134 return NULL_TREE;
2136 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2137 return NULL_TREE;
2139 return builtin_decl_explicit (fcode2);
2142 /* Like mathfn_built_in_1, but always use the implicit array. */
2144 tree
2145 mathfn_built_in (tree type, combined_fn fn)
2147 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2150 /* Like mathfn_built_in_1, but take a built_in_function and
2151 always use the implicit array. */
2153 tree
2154 mathfn_built_in (tree type, enum built_in_function fn)
2156 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
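/* Illustrative example (added, not part of the original source):
   mathfn_built_in (float_type_node, BUILT_IN_SIN) maps the double
   builtin onto its float variant and returns the declaration of
   __builtin_sinf, or NULL_TREE if that variant is not implicitly
   available.  */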
2159 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2160 return its code, otherwise return IFN_LAST. Note that this function
2161 only tests whether the function is defined in internals.def, not whether
2162 it is actually available on the target. */
2164 internal_fn
2165 associated_internal_fn (tree fndecl)
2167 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2168 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2169 switch (DECL_FUNCTION_CODE (fndecl))
2171 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2172 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2173 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2174 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2175 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2176 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2177 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2178 #include "internal-fn.def"
2180 CASE_FLT_FN (BUILT_IN_POW10):
2181 return IFN_EXP10;
2183 CASE_FLT_FN (BUILT_IN_DREM):
2184 return IFN_REMAINDER;
2186 CASE_FLT_FN (BUILT_IN_SCALBN):
2187 CASE_FLT_FN (BUILT_IN_SCALBLN):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2189 return IFN_LDEXP;
2190 return IFN_LAST;
2192 default:
2193 return IFN_LAST;
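/* Illustrative example (added, not part of the original source): for
   the decl of __builtin_sqrtf this returns IFN_SQRT, since SQRT is
   declared through internal-fn.def; __builtin_pow10 maps to IFN_EXP10
   via the explicit case above.  */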
2197 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2198 on the current target by a call to an internal function, return the
2199 code of that internal function, otherwise return IFN_LAST. The caller
2200 is responsible for ensuring that any side-effects of the built-in
2201 call are dealt with correctly. E.g. if CALL sets errno, the caller
2202 must decide that the errno result isn't needed or make it available
2203 in some other way. */
2205 internal_fn
2206 replacement_internal_fn (gcall *call)
2208 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2210 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2211 if (ifn != IFN_LAST)
2213 tree_pair types = direct_internal_fn_types (ifn, call);
2214 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2215 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2216 return ifn;
2219 return IFN_LAST;
2222 /* Expand a call to the builtin ternary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2227 operands. */
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2232 optab builtin_optab;
2233 rtx op0, op1, op2, result;
2234 rtx_insn *insns;
2235 tree fndecl = get_callee_fndecl (exp);
2236 tree arg0, arg1, arg2;
2237 machine_mode mode;
2239 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2240 return NULL_RTX;
2242 arg0 = CALL_EXPR_ARG (exp, 0);
2243 arg1 = CALL_EXPR_ARG (exp, 1);
2244 arg2 = CALL_EXPR_ARG (exp, 2);
2246 switch (DECL_FUNCTION_CODE (fndecl))
2248 CASE_FLT_FN (BUILT_IN_FMA):
2249 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2250 builtin_optab = fma_optab; break;
2251 default:
2252 gcc_unreachable ();
2255 /* Make a suitable register to place result in. */
2256 mode = TYPE_MODE (TREE_TYPE (exp));
2258 /* Before working hard, check whether the instruction is available. */
2259 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260 return NULL_RTX;
2262 result = gen_reg_rtx (mode);
2264 /* Always stabilize the argument list. */
2265 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2269 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270 op1 = expand_normal (arg1);
2271 op2 = expand_normal (arg2);
2273 start_sequence ();
2275 /* Compute into RESULT.
2276 Set RESULT to wherever the result comes back. */
2277 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 result, 0);
2280 /* If we were unable to expand via the builtin, stop the sequence
2281 (without outputting the insns) and call to the library function
2282 with the stabilized argument list. */
2283 if (result == 0)
2285 end_sequence ();
2286 return expand_call (exp, target, target == const0_rtx);
2289 /* Output the entire sequence. */
2290 insns = get_insns ();
2291 end_sequence ();
2292 emit_insn (insns);
2294 return result;
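/* Illustrative sketch (added example, not part of the original source):
   on a target whose fma optab pattern is available for DFmode,

     double d = __builtin_fma (a, b, c);

   expands to a single fused multiply-add insn; otherwise the code
   above gives up and a normal library call to fma is emitted.  */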
2297 /* Expand a call to the builtin sin and cos math functions.
2298 Return NULL_RTX if a normal call should be emitted rather than expanding the
2299 function in-line. EXP is the expression that is a call to the builtin
2300 function; if convenient, the result should be placed in TARGET.
2301 SUBTARGET may be used as the target for computing one of EXP's
2302 operands. */
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2307 optab builtin_optab;
2308 rtx op0;
2309 rtx_insn *insns;
2310 tree fndecl = get_callee_fndecl (exp);
2311 machine_mode mode;
2312 tree arg;
2314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2317 arg = CALL_EXPR_ARG (exp, 0);
2319 switch (DECL_FUNCTION_CODE (fndecl))
2321 CASE_FLT_FN (BUILT_IN_SIN):
2322 CASE_FLT_FN (BUILT_IN_COS):
2323 builtin_optab = sincos_optab; break;
2324 default:
2325 gcc_unreachable ();
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (exp));
2331 /* Check if the sincos insn is available; otherwise fall back
2332 to the sin or cos insn. */
2333 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334 switch (DECL_FUNCTION_CODE (fndecl))
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 builtin_optab = sin_optab; break;
2338 CASE_FLT_FN (BUILT_IN_COS):
2339 builtin_optab = cos_optab; break;
2340 default:
2341 gcc_unreachable ();
2344 /* Before working hard, check whether the instruction is available. */
2345 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2347 rtx result = gen_reg_rtx (mode);
2349 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 need to expand the argument again. This way, we will not perform
2351 side-effects more than once. */
2352 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2354 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2356 start_sequence ();
2358 /* Compute into RESULT.
2359 Set RESULT to wherever the result comes back. */
2360 if (builtin_optab == sincos_optab)
2362 int ok;
2364 switch (DECL_FUNCTION_CODE (fndecl))
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 break;
2372 default:
2373 gcc_unreachable ();
2375 gcc_assert (ok);
2377 else
2378 result = expand_unop (mode, builtin_optab, op0, result, 0);
2380 if (result != 0)
2382 /* Output the entire sequence. */
2383 insns = get_insns ();
2384 end_sequence ();
2385 emit_insn (insns);
2386 return result;
2389 /* If we were unable to expand via the builtin, stop the sequence
2390 (without outputting the insns) and call to the library function
2391 with the stabilized argument list. */
2392 end_sequence ();
2395 return expand_call (exp, target, target == const0_rtx);
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG
2399 return an RTL instruction code that implements the functionality.
2400 If that isn't possible or available return CODE_FOR_nothing. */
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2405 bool errno_set = false;
2406 optab builtin_optab = unknown_optab;
2407 machine_mode mode;
2409 switch (DECL_FUNCTION_CODE (fndecl))
2411 CASE_FLT_FN (BUILT_IN_ILOGB):
2412 errno_set = true; builtin_optab = ilogb_optab; break;
2413 CASE_FLT_FN (BUILT_IN_ISINF):
2414 builtin_optab = isinf_optab; break;
2415 case BUILT_IN_ISNORMAL:
2416 case BUILT_IN_ISFINITE:
2417 CASE_FLT_FN (BUILT_IN_FINITE):
2418 case BUILT_IN_FINITED32:
2419 case BUILT_IN_FINITED64:
2420 case BUILT_IN_FINITED128:
2421 case BUILT_IN_ISINFD32:
2422 case BUILT_IN_ISINFD64:
2423 case BUILT_IN_ISINFD128:
2424 /* These builtins have no optabs (yet). */
2425 break;
2426 default:
2427 gcc_unreachable ();
2430 /* There's no easy way to detect the case we need to set EDOM. */
2431 if (flag_errno_math && errno_set)
2432 return CODE_FOR_nothing;
2434 /* Optab mode depends on the mode of the input argument. */
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2437 if (builtin_optab)
2438 return optab_handler (builtin_optab, mode);
2439 return CODE_FOR_nothing;
2442 /* Expand a call to one of the builtin math functions that operate on
2443 a floating-point argument and output an integer result (ilogb, isinf,
2444 isnan, etc.).
2445 Return 0 if a normal call should be emitted rather than expanding the
2446 function in-line. EXP is the expression that is a call to the builtin
2447 function; if convenient, the result should be placed in TARGET. */
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2452 enum insn_code icode = CODE_FOR_nothing;
2453 rtx op0;
2454 tree fndecl = get_callee_fndecl (exp);
2455 machine_mode mode;
2456 tree arg;
2458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459 return NULL_RTX;
2461 arg = CALL_EXPR_ARG (exp, 0);
2462 icode = interclass_mathfn_icode (arg, fndecl);
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2465 if (icode != CODE_FOR_nothing)
2467 struct expand_operand ops[1];
2468 rtx_insn *last = get_last_insn ();
2469 tree orig_arg = arg;
2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side-effects more than once. */
2474 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2476 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2478 if (mode != GET_MODE (op0))
2479 op0 = convert_to_mode (mode, op0, 0);
2481 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482 if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 return ops[0].value;
2486 delete_insns_since (last);
2487 CALL_EXPR_ARG (exp, 0) = orig_arg;
2490 return NULL_RTX;
2493 /* Expand a call to the builtin sincos math function.
2494 Return NULL_RTX if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function. */
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2501 rtx op0, op1, op2, target1, target2;
2502 machine_mode mode;
2503 tree arg, sinp, cosp;
2504 int result;
2505 location_t loc = EXPR_LOCATION (exp);
2506 tree alias_type, alias_off;
2508 if (!validate_arglist (exp, REAL_TYPE,
2509 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510 return NULL_RTX;
2512 arg = CALL_EXPR_ARG (exp, 0);
2513 sinp = CALL_EXPR_ARG (exp, 1);
2514 cosp = CALL_EXPR_ARG (exp, 2);
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (arg));
2519 /* Check if the sincos insn is available; otherwise emit the call. */
2520 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521 return NULL_RTX;
2523 target1 = gen_reg_rtx (mode);
2524 target2 = gen_reg_rtx (mode);
2526 op0 = expand_normal (arg);
2527 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528 alias_off = build_int_cst (alias_type, 0);
2529 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 sinp, alias_off));
2531 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 cosp, alias_off));
2534 /* Compute into target1 and target2.
2535 Set TARGET to wherever the result comes back. */
2536 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537 gcc_assert (result);
2539 /* Move target1 and target2 to the memory locations indicated
2540 by op1 and op2. */
2541 emit_move_insn (op1, target1);
2542 emit_move_insn (op2, target2);
2544 return const0_rtx;
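/* Illustrative sketch (added example, not part of the original source):
   with a sincos insn available, user code such as

     double s, c;
     sincos (x, &s, &c);

   expands to one two-output insn followed by stores through the two
   pointers, instead of a library call.  */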
2547 /* Expand a call to the internal cexpi builtin to the sincos math function.
2548 EXP is the expression that is a call to the builtin function; if convenient,
2549 the result should be placed in TARGET. */
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2554 tree fndecl = get_callee_fndecl (exp);
2555 tree arg, type;
2556 machine_mode mode;
2557 rtx op0, op1, op2;
2558 location_t loc = EXPR_LOCATION (exp);
2560 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561 return NULL_RTX;
2563 arg = CALL_EXPR_ARG (exp, 0);
2564 type = TREE_TYPE (arg);
2565 mode = TYPE_MODE (TREE_TYPE (arg));
2567 /* Try expanding via a sincos optab; fall back to emitting a libcall
2568 to sincos or cexp. One of those must be available, since cexpi is
2569 only generated from sincos or cexp, or when either is known to exist. */
2570 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2572 op1 = gen_reg_rtx (mode);
2573 op2 = gen_reg_rtx (mode);
2575 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2577 /* Compute into op1 and op2. */
2578 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2580 else if (targetm.libc_has_function (function_sincos))
2582 tree call, fn = NULL_TREE;
2583 tree top1, top2;
2584 rtx op1a, op2a;
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592 else
2593 gcc_unreachable ();
2595 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597 op1a = copy_addr_to_reg (XEXP (op1, 0));
2598 op2a = copy_addr_to_reg (XEXP (op2, 0));
2599 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2602 /* Make sure not to fold the sincos call again. */
2603 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 call, 3, arg, top1, top2));
2607 else
2609 tree call, fn = NULL_TREE, narg;
2610 tree ctype = build_complex_type (type);
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618 else
2619 gcc_unreachable ();
2621 /* If we don't have a decl for cexp, create one. This is the
2622 friendliest fallback if the user calls __builtin_cexpi
2623 on a target without full C99 function support. */
2624 if (fn == NULL_TREE)
2626 tree fntype;
2627 const char *name = NULL;
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 name = "cexpf";
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 name = "cexp";
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 name = "cexpl";
2636 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 fn = build_fn_decl (name, fntype);
2640 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 build_real (type, dconst0), arg);
2643 /* Make sure not to fold the cexp call again. */
2644 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645 return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 target, VOIDmode, EXPAND_NORMAL);
2649 /* Now build the proper return type. */
2650 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 make_tree (TREE_TYPE (arg), op2),
2652 make_tree (TREE_TYPE (arg), op1)),
2653 target, VOIDmode, EXPAND_NORMAL);
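/* Illustrative note (added, not part of the original source): the
   internal __builtin_cexpi (x) computes cos (x) + i*sin (x).  With a
   sincos optab the COMPLEX_EXPR above pairs op2 (cos) as the real part
   with op1 (sin) as the imaginary part.  */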
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions. Unlike build_call_expr
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2664 va_list ap;
2665 tree fntype = TREE_TYPE (fndecl);
2666 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2668 va_start (ap, n);
2669 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670 va_end (ap);
2671 SET_EXPR_LOCATION (fn, loc);
2672 return fn;
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676 as an extension (lfloor and lceil). As these are gcc extensions we
2677 do not need to worry about setting errno to EDOM.
2678 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2679 EXP is the expression that is a call to the builtin function;
2680 if convenient, the result should be placed in TARGET. */
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2685 convert_optab builtin_optab;
2686 rtx op0, tmp;
2687 rtx_insn *insns;
2688 tree fndecl = get_callee_fndecl (exp);
2689 enum built_in_function fallback_fn;
2690 tree fallback_fndecl;
2691 machine_mode mode;
2692 tree arg;
2694 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2695 return NULL_RTX;
2697 arg = CALL_EXPR_ARG (exp, 0);
2699 switch (DECL_FUNCTION_CODE (fndecl))
2701 CASE_FLT_FN (BUILT_IN_ICEIL):
2702 CASE_FLT_FN (BUILT_IN_LCEIL):
2703 CASE_FLT_FN (BUILT_IN_LLCEIL):
2704 builtin_optab = lceil_optab;
2705 fallback_fn = BUILT_IN_CEIL;
2706 break;
2708 CASE_FLT_FN (BUILT_IN_IFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LFLOOR):
2710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2711 builtin_optab = lfloor_optab;
2712 fallback_fn = BUILT_IN_FLOOR;
2713 break;
2715 default:
2716 gcc_unreachable ();
2719 /* Make a suitable register to place result in. */
2720 mode = TYPE_MODE (TREE_TYPE (exp));
2722 target = gen_reg_rtx (mode);
2724 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725 need to expand the argument again. This way, we will not perform
2726 side-effects more than once. */
2727 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2729 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2731 start_sequence ();
2733 /* Compute into TARGET. */
2734 if (expand_sfix_optab (target, op0, builtin_optab))
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2738 end_sequence ();
2739 emit_insn (insns);
2740 return target;
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns). */
2745 end_sequence ();
2747 /* Fall back to floating point rounding optab. */
2748 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2750 /* For non-C99 targets we may end up without a fallback fndecl here
2751 if the user called __builtin_lfloor directly. In this case emit
2752 a call to the floor/ceil variants nevertheless. This should result
2753 in the best user experience on targets without full C99 support. */
2754 if (fallback_fndecl == NULL_TREE)
2756 tree fntype;
2757 const char *name = NULL;
2759 switch (DECL_FUNCTION_CODE (fndecl))
2761 case BUILT_IN_ICEIL:
2762 case BUILT_IN_LCEIL:
2763 case BUILT_IN_LLCEIL:
2764 name = "ceil";
2765 break;
2766 case BUILT_IN_ICEILF:
2767 case BUILT_IN_LCEILF:
2768 case BUILT_IN_LLCEILF:
2769 name = "ceilf";
2770 break;
2771 case BUILT_IN_ICEILL:
2772 case BUILT_IN_LCEILL:
2773 case BUILT_IN_LLCEILL:
2774 name = "ceill";
2775 break;
2776 case BUILT_IN_IFLOOR:
2777 case BUILT_IN_LFLOOR:
2778 case BUILT_IN_LLFLOOR:
2779 name = "floor";
2780 break;
2781 case BUILT_IN_IFLOORF:
2782 case BUILT_IN_LFLOORF:
2783 case BUILT_IN_LLFLOORF:
2784 name = "floorf";
2785 break;
2786 case BUILT_IN_IFLOORL:
2787 case BUILT_IN_LFLOORL:
2788 case BUILT_IN_LLFLOORL:
2789 name = "floorl";
2790 break;
2791 default:
2792 gcc_unreachable ();
2795 fntype = build_function_type_list (TREE_TYPE (arg),
2796 TREE_TYPE (arg), NULL_TREE);
2797 fallback_fndecl = build_fn_decl (name, fntype);
2800 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2802 tmp = expand_normal (exp);
2803 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2805 /* Truncate the result of floating point optab to integer
2806 via expand_fix (). */
2807 target = gen_reg_rtx (mode);
2808 expand_fix (target, tmp, 0);
2810 return target;
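/* Illustrative sketch (added example, not part of the original source):
   when the lceil/lfloor optab is unavailable,

     long l = __builtin_lfloor (x);

   is lowered by the fallback path above to roughly

     long l = (long) floor (x);

   i.e. a call to the floor variant followed by expand_fix.  */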
2813 /* Expand a call to one of the builtin math functions doing integer
2814 conversion (lrint).
2815 Return 0 if a normal call should be emitted rather than expanding the
2816 function in-line. EXP is the expression that is a call to the builtin
2817 function; if convenient, the result should be placed in TARGET. */
2819 static rtx
2820 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2822 convert_optab builtin_optab;
2823 rtx op0;
2824 rtx_insn *insns;
2825 tree fndecl = get_callee_fndecl (exp);
2826 tree arg;
2827 machine_mode mode;
2828 enum built_in_function fallback_fn = BUILT_IN_NONE;
2830 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2831 return NULL_RTX;
2833 arg = CALL_EXPR_ARG (exp, 0);
2835 switch (DECL_FUNCTION_CODE (fndecl))
2837 CASE_FLT_FN (BUILT_IN_IRINT):
2838 fallback_fn = BUILT_IN_LRINT;
2839 gcc_fallthrough ();
2840 CASE_FLT_FN (BUILT_IN_LRINT):
2841 CASE_FLT_FN (BUILT_IN_LLRINT):
2842 builtin_optab = lrint_optab;
2843 break;
2845 CASE_FLT_FN (BUILT_IN_IROUND):
2846 fallback_fn = BUILT_IN_LROUND;
2847 gcc_fallthrough ();
2848 CASE_FLT_FN (BUILT_IN_LROUND):
2849 CASE_FLT_FN (BUILT_IN_LLROUND):
2850 builtin_optab = lround_optab;
2851 break;
2853 default:
2854 gcc_unreachable ();
2857 /* There's no easy way to detect the case we need to set EDOM. */
2858 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2859 return NULL_RTX;
2861 /* Make a suitable register to place result in. */
2862 mode = TYPE_MODE (TREE_TYPE (exp));
2864 /* There's no easy way to detect the case we need to set EDOM. */
2865 if (!flag_errno_math)
2867 rtx result = gen_reg_rtx (mode);
2869 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 need to expand the argument again. This way, we will not perform
2871 side-effects more than once. */
2872 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2874 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2876 start_sequence ();
2878 if (expand_sfix_optab (result, op0, builtin_optab))
2880 /* Output the entire sequence. */
2881 insns = get_insns ();
2882 end_sequence ();
2883 emit_insn (insns);
2884 return result;
2887 /* If we were unable to expand via the builtin, stop the sequence
2888 (without outputting the insns) and call to the library function
2889 with the stabilized argument list. */
2890 end_sequence ();
2893 if (fallback_fn != BUILT_IN_NONE)
2895 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2896 targets, (int) round (x) should never be transformed into
2897 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 a call to lround in the hope that the target provides at least some
2899 C99 functions. This should result in the best user experience on
2900 targets without full C99 support. */
2901 tree fallback_fndecl = mathfn_built_in_1
2902 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2904 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2905 fallback_fndecl, 1, arg);
2907 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2908 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2909 return convert_to_mode (mode, target, 0);
2912 return expand_call (exp, target, target == const0_rtx);
2915 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2916 a normal call should be emitted rather than expanding the function
2917 in-line. EXP is the expression that is a call to the builtin
2918 function; if convenient, the result should be placed in TARGET. */
2920 static rtx
2921 expand_builtin_powi (tree exp, rtx target)
2923 tree arg0, arg1;
2924 rtx op0, op1;
2925 machine_mode mode;
2926 machine_mode mode2;
2928 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2929 return NULL_RTX;
2931 arg0 = CALL_EXPR_ARG (exp, 0);
2932 arg1 = CALL_EXPR_ARG (exp, 1);
2933 mode = TYPE_MODE (TREE_TYPE (exp));
2935 /* Emit a libcall to libgcc. */
2937 /* Mode of the 2nd argument must match that of an int. */
2938 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2940 if (target == NULL_RTX)
2941 target = gen_reg_rtx (mode);
2943 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2944 if (GET_MODE (op0) != mode)
2945 op0 = convert_to_mode (mode, op0, 0);
2946 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2947 if (GET_MODE (op1) != mode2)
2948 op1 = convert_to_mode (mode2, op1, 0);
2950 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2951 target, LCT_CONST, mode,
2952 op0, mode, op1, mode2);
2954 return target;
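/* Illustrative sketch (added example, not part of the original source):

     double d = __builtin_powi (x, n);

   always becomes a libcall into libgcc (typically __powidf2 for
   DFmode), with the exponent first converted to the mode of int.  */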
2957 /* Expand expression EXP which is a call to the strlen builtin. Return
2958 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2959 try to get the result in TARGET, if convenient. */
2961 static rtx
2962 expand_builtin_strlen (tree exp, rtx target,
2963 machine_mode target_mode)
2965 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2966 return NULL_RTX;
2968 struct expand_operand ops[4];
2969 rtx pat;
2970 tree len;
2971 tree src = CALL_EXPR_ARG (exp, 0);
2972 rtx src_reg;
2973 rtx_insn *before_strlen;
2974 machine_mode insn_mode;
2975 enum insn_code icode = CODE_FOR_nothing;
2976 unsigned int align;
2978 /* If the length can be computed at compile-time, return it. */
2979 len = c_strlen (src, 0);
2980 if (len)
2981 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 /* If the length can be computed at compile-time and is a constant
2984 integer, but there are side-effects in src, evaluate
2985 src for side-effects, then return len.
2986 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987 can be optimized into: i++; x = 3; */
2988 len = c_strlen (src, 1);
2989 if (len && TREE_CODE (len) == INTEGER_CST)
2991 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2995 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2997 /* If SRC is not a pointer type, don't do this operation inline. */
2998 if (align == 0)
2999 return NULL_RTX;
3001 /* Bail out if we can't compute strlen in the right mode. */
3002 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3004 icode = optab_handler (strlen_optab, insn_mode);
3005 if (icode != CODE_FOR_nothing)
3006 break;
3008 if (insn_mode == VOIDmode)
3009 return NULL_RTX;
3011 /* Make a place to hold the source address. We will not expand
3012 the actual source until we are sure that the expansion will
3013 not fail -- there are trees that cannot be expanded twice. */
3014 src_reg = gen_reg_rtx (Pmode);
3016 /* Mark the beginning of the strlen sequence so we can emit the
3017 source operand later. */
3018 before_strlen = get_last_insn ();
3020 create_output_operand (&ops[0], target, insn_mode);
3021 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3022 create_integer_operand (&ops[2], 0);
3023 create_integer_operand (&ops[3], align);
3024 if (!maybe_expand_insn (icode, 4, ops))
3025 return NULL_RTX;
3027 /* Check to see if the argument was declared attribute nonstring
3028 and if so, issue a warning since at this point it's not known
3029 to be nul-terminated. */
3030 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3032 /* Now that we are assured of success, expand the source. */
3033 start_sequence ();
3034 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3035 if (pat != src_reg)
3037 #ifdef POINTERS_EXTEND_UNSIGNED
3038 if (GET_MODE (pat) != Pmode)
3039 pat = convert_to_mode (Pmode, pat,
3040 POINTERS_EXTEND_UNSIGNED);
3041 #endif
3042 emit_move_insn (src_reg, pat);
3044 pat = get_insns ();
3045 end_sequence ();
3047 if (before_strlen)
3048 emit_insn_after (pat, before_strlen);
3049 else
3050 emit_insn_before (pat, get_insns ());
3052 /* Return the value in the proper mode for this function. */
3053 if (GET_MODE (ops[0].value) == target_mode)
3054 target = ops[0].value;
3055 else if (target != 0)
3056 convert_move (target, ops[0].value, 0);
3057 else
3058 target = convert_to_mode (target_mode, ops[0].value, 0);
3060 return target;
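/* Illustrative examples (added, not part of the original source) of the
   two compile-time paths above:

     n = strlen ("hello");                  becomes  n = 5;
     x = strlen (i++ ? "xfoo" + 1 : "bar"); becomes  i++; x = 3;

   Only when neither applies is the strlen optab consulted.  */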
3063 /* Expand call EXP to the strnlen built-in, placing the result in
3064 TARGET if convenient. Return NULL_RTX on failure. */
3066 static rtx
3067 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3069 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3070 return NULL_RTX;
3072 tree src = CALL_EXPR_ARG (exp, 0);
3073 tree bound = CALL_EXPR_ARG (exp, 1);
3075 if (!bound)
3076 return NULL_RTX;
3078 location_t loc = UNKNOWN_LOCATION;
3079 if (EXPR_HAS_LOCATION (exp))
3080 loc = EXPR_LOCATION (exp);
3082 tree maxobjsize = max_object_size ();
3083 tree func = get_callee_fndecl (exp);
3085 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3086 so these conversions aren't necessary. */
3087 c_strlen_data lendata = { };
3088 tree len = c_strlen (src, 0, &lendata, 1);
3089 if (len)
3090 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3092 if (TREE_CODE (bound) == INTEGER_CST)
3094 if (!TREE_NO_WARNING (exp)
3095 && tree_int_cst_lt (maxobjsize, bound)
3096 && warning_at (loc, OPT_Wstringop_overflow_,
3097 "%K%qD specified bound %E "
3098 "exceeds maximum object size %E",
3099 exp, func, bound, maxobjsize))
3100 TREE_NO_WARNING (exp) = true;
3102 bool exact = true;
3103 if (!len || TREE_CODE (len) != INTEGER_CST)
3105 /* Clear EXACT if LEN may be less than SRC suggests,
3106 such as in
3107 strnlen (&a[i], sizeof a)
3108 where the value of i is unknown. Unless i's value is
3109 zero, the call is unsafe because the bound is greater. */
3110 lendata.decl = unterminated_array (src, &len, &exact);
3111 if (!lendata.decl)
3112 return NULL_RTX;
3115 if (lendata.decl
3116 && !TREE_NO_WARNING (exp)
3117 && ((tree_int_cst_lt (len, bound))
3118 || !exact))
3120 location_t warnloc
3121 = expansion_point_location_if_in_system_header (loc);
3123 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3124 exact
3125 ? G_("%K%qD specified bound %E exceeds the size %E "
3126 "of unterminated array")
3127 : G_("%K%qD specified bound %E may exceed the size "
3128 "of at most %E of unterminated array"),
3129 exp, func, bound, len))
3131 inform (DECL_SOURCE_LOCATION (lendata.decl),
3132 "referenced argument declared here");
3133 TREE_NO_WARNING (exp) = true;
3134 return NULL_RTX;
3138 if (!len)
3139 return NULL_RTX;
3141 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3142 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3145 if (TREE_CODE (bound) != SSA_NAME)
3146 return NULL_RTX;
3148 wide_int min, max;
3149 enum value_range_kind rng = get_range_info (bound, &min, &max);
3150 if (rng != VR_RANGE)
3151 return NULL_RTX;
3153 if (!TREE_NO_WARNING (exp)
3154 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3155 && warning_at (loc, OPT_Wstringop_overflow_,
3156 "%K%qD specified bound [%wu, %wu] "
3157 "exceeds maximum object size %E",
3158 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3159 TREE_NO_WARNING (exp) = true;
3161 bool exact = true;
3162 if (!len || TREE_CODE (len) != INTEGER_CST)
3164 lendata.decl = unterminated_array (src, &len, &exact);
3165 if (!lendata.decl)
3166 return NULL_RTX;
3169 if (lendata.decl
3170 && !TREE_NO_WARNING (exp)
3171 && (wi::ltu_p (wi::to_wide (len), min)
3172 || !exact))
3174 location_t warnloc
3175 = expansion_point_location_if_in_system_header (loc);
3177 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3178 exact
3179 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3180 "the size %E of unterminated array")
3181 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3182 "the size of at most %E of unterminated array"),
3183 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3185 inform (DECL_SOURCE_LOCATION (lendata.decl),
3186 "referenced argument declared here");
3187 TREE_NO_WARNING (exp) = true;
3191 if (lendata.decl)
3192 return NULL_RTX;
3194 if (wi::gtu_p (min, wi::to_wide (len)))
3195 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3197 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3198 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
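/* Illustrative example (added, not part of the original source): with a
   known string and a constant bound,

     n = strnlen ("hello", 3);

   the code above computes MIN (5, 3) and expands the constant 3.  */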
3201 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3202 bytes from constant string DATA + OFFSET and return it as a target
3203 constant. */
3205 static rtx
3206 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3207 scalar_int_mode mode)
3209 const char *str = (const char *) data;
3211 gcc_assert (offset >= 0
3212 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3213 <= strlen (str) + 1));
3215 return c_readstr (str + offset, mode);
3218 /* LEN specifies the length of the block of the memcpy/memset operation.
3219 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3220 In some cases we can make a very likely guess at the maximum size,
3221 which we then store in PROBABLE_MAX_SIZE. */
3223 static void
3224 determine_block_size (tree len, rtx len_rtx,
3225 unsigned HOST_WIDE_INT *min_size,
3226 unsigned HOST_WIDE_INT *max_size,
3227 unsigned HOST_WIDE_INT *probable_max_size)
3229 if (CONST_INT_P (len_rtx))
3231 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3232 return;
3234 else
3236 wide_int min, max;
3237 enum value_range_kind range_type = VR_UNDEFINED;
3239 /* Determine bounds from the type. */
3240 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3241 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3242 else
3243 *min_size = 0;
3244 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3245 *probable_max_size = *max_size
3246 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3247 else
3248 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3250 if (TREE_CODE (len) == SSA_NAME)
3251 range_type = get_range_info (len, &min, &max);
3252 if (range_type == VR_RANGE)
3254 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3255 *min_size = min.to_uhwi ();
3256 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3257 *probable_max_size = *max_size = max.to_uhwi ();
3259 else if (range_type == VR_ANTI_RANGE)
3261 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3262 if (min == 0)
3264 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3265 *min_size = max.to_uhwi () + 1;
3267 /* Code like
3269 int n;
3270 if (n < 100)
3271 memcpy (a, b, n)
3273 produces an anti-range allowing negative values of N. We can
3274 still use that information and guess that N is not negative.
3275 */
3276 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3277 *probable_max_size = min.to_uhwi () - 1;
3280 gcc_checking_assert (*max_size <=
3281 (unsigned HOST_WIDE_INT)
3282 GET_MODE_MASK (GET_MODE (len_rtx)));
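/* Illustrative example (added, not part of the original source): if LEN
   is an SSA name with value range [8, 32], the code above sets
   *MIN_SIZE to 8 and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 32,
   letting the block-move expander choose a strategy better than the
   worst-case [0, SIZE_MAX] assumption.  */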
3285 /* Try to verify that the sizes and lengths of the arguments to a string
3286 manipulation function given by EXP are within valid bounds and that
3287 the operation does not lead to buffer overflow or read past the end.
3288 Arguments other than EXP may be null. When non-null, the arguments
3289 have the following meaning:
3290 DST is the destination of a copy call or NULL otherwise.
3291 SRC is the source of a copy call or NULL otherwise.
3292 DSTWRITE is the number of bytes written into the destination obtained
3293 from the user-supplied size argument to the function (such as in
3294 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3295 MAXREAD is the user-supplied bound on the length of the source sequence
3296 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3297 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3298 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3299 expression EXP is a string function call (as opposed to a memory call
3300 like memcpy). As an exception, SRCSTR can also be an integer denoting
3301 the precomputed size of the source string or object (for functions like
3302 memcpy).
3303 DSTSIZE is the size of the destination object specified by the last
3304 argument to the _chk builtins, typically resulting from the expansion
3305 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3306 DSTSIZE).
3308 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3309 SIZE_MAX.
3311 If the call is successfully verified as safe return true, otherwise
3312 return false. */
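/* Illustrative example (added, not part of the original source): a call
   such as

     char d[3];
     strcpy (d, "abcd");

   fails this check: the source length plus the terminating nul gives a
   write range of [5, 5] against DSTSIZE 3, so a -Wstringop-overflow
   warning is issued and false is returned.  */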
3314 static bool
3315 check_access (tree exp, tree, tree, tree dstwrite,
3316 tree maxread, tree srcstr, tree dstsize)
3318 int opt = OPT_Wstringop_overflow_;
3320 /* The size of the largest object is half the address space, or
3321 PTRDIFF_MAX. (This is way too permissive.) */
3322 tree maxobjsize = max_object_size ();
3324 /* Either the length of the source string for string functions or
3325 the size of the source object for raw memory functions. */
3326 tree slen = NULL_TREE;
3328 tree range[2] = { NULL_TREE, NULL_TREE };
3330 /* Set to true when the exact number of bytes written by a string
3331 function like strcpy is not known and the only thing that is
3332 known is that it must be at least one (for the terminating nul). */
3333 bool at_least_one = false;
3334 if (srcstr)
3336 /* SRCSTR is normally a pointer to string but as a special case
3337 it can be an integer denoting the length of a string. */
3338 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3340 /* Try to determine the range of lengths the source string
3341 refers to. If it can be determined and is less than
3342 the upper bound given by MAXREAD add one to it for
3343 the terminating nul. Otherwise, set it to one for
3344 the same reason, or to MAXREAD as appropriate. */
3345 c_strlen_data lendata = { };
3346 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3347 range[0] = lendata.minlen;
3348 range[1] = lendata.maxbound;
3349 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3351 if (maxread && tree_int_cst_le (maxread, range[0]))
3352 range[0] = range[1] = maxread;
3353 else
3354 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3355 range[0], size_one_node);
3357 if (maxread && tree_int_cst_le (maxread, range[1]))
3358 range[1] = maxread;
3359 else if (!integer_all_onesp (range[1]))
3360 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3361 range[1], size_one_node);
3363 slen = range[0];
3365 else
3367 at_least_one = true;
3368 slen = size_one_node;
3371 else
3372 slen = srcstr;
3375 if (!dstwrite && !maxread)
3377 /* When the only available piece of data is the object size
3378 there is nothing to do. */
3379 if (!slen)
3380 return true;
3382 /* Otherwise, when the length of the source sequence is known
3383 (as with strlen), set DSTWRITE to it. */
3384 if (!range[0])
3385 dstwrite = slen;
3388 if (!dstsize)
3389 dstsize = maxobjsize;
3391 if (dstwrite)
3392 get_size_range (dstwrite, range);
3394 tree func = get_callee_fndecl (exp);
3396 /* First check the number of bytes to be written against the maximum
3397 object size. */
3398 if (range[0]
3399 && TREE_CODE (range[0]) == INTEGER_CST
3400 && tree_int_cst_lt (maxobjsize, range[0]))
3402 if (TREE_NO_WARNING (exp))
3403 return false;
3405 location_t loc = tree_nonartificial_location (exp);
3406 loc = expansion_point_location_if_in_system_header (loc);
3408 bool warned;
3409 if (range[0] == range[1])
3410 warned = warning_at (loc, opt,
3411 "%K%qD specified size %E "
3412 "exceeds maximum object size %E",
3413 exp, func, range[0], maxobjsize);
3414 else
3415 warned = warning_at (loc, opt,
3416 "%K%qD specified size between %E and %E "
3417 "exceeds maximum object size %E",
3418 exp, func,
3419 range[0], range[1], maxobjsize);
3420 if (warned)
3421 TREE_NO_WARNING (exp) = true;
3423 return false;
3426 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3427 constant, and in range of unsigned HOST_WIDE_INT. */
3428 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3430 /* Next check the number of bytes to be written against the destination
3431 object size. */
3432 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3434 if (range[0]
3435 && TREE_CODE (range[0]) == INTEGER_CST
3436 && ((tree_fits_uhwi_p (dstsize)
3437 && tree_int_cst_lt (dstsize, range[0]))
3438 || (dstwrite
3439 && tree_fits_uhwi_p (dstwrite)
3440 && tree_int_cst_lt (dstwrite, range[0]))))
3442 if (TREE_NO_WARNING (exp))
3443 return false;
3445 location_t loc = tree_nonartificial_location (exp);
3446 loc = expansion_point_location_if_in_system_header (loc);
3448 if (dstwrite == slen && at_least_one)
3450 /* This is a call to strcpy with a destination of 0 size
3451 and a source of unknown length. The call will write
3452 at least one byte past the end of the destination. */
3453 warning_at (loc, opt,
3454 "%K%qD writing %E or more bytes into a region "
3455 "of size %E overflows the destination",
3456 exp, func, range[0], dstsize);
3458 else if (tree_int_cst_equal (range[0], range[1]))
3459 warning_n (loc, opt, tree_to_uhwi (range[0]),
3460 "%K%qD writing %E byte into a region "
3461 "of size %E overflows the destination",
3462 "%K%qD writing %E bytes into a region "
3463 "of size %E overflows the destination",
3464 exp, func, range[0], dstsize);
3465 else if (tree_int_cst_sign_bit (range[1]))
3467 /* Avoid printing the upper bound if it's invalid. */
3468 warning_at (loc, opt,
3469 "%K%qD writing %E or more bytes into a region "
3470 "of size %E overflows the destination",
3471 exp, func, range[0], dstsize);
3473 else
3474 warning_at (loc, opt,
3475 "%K%qD writing between %E and %E bytes into "
3476 "a region of size %E overflows the destination",
3477 exp, func, range[0], range[1],
3478 dstsize);
3480 /* Return error when an overflow has been detected. */
3481 return false;
3485 /* Check the maximum length of the source sequence against the size
3486 of the destination object if known, or against the maximum size
3487 of an object. */
3488 if (maxread)
3490 get_size_range (maxread, range);
3492 /* Use the lower end for MAXREAD from now on. */
3493 if (range[0])
3494 maxread = range[0];
3496 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3498 location_t loc = tree_nonartificial_location (exp);
3499 loc = expansion_point_location_if_in_system_header (loc);
3501 if (tree_int_cst_lt (maxobjsize, range[0]))
3503 if (TREE_NO_WARNING (exp))
3504 return false;
3506 /* Warn about crazy big sizes first since that's more
3507 likely to be meaningful than saying that the bound
3508 is greater than the object size if both are big. */
3509 if (range[0] == range[1])
3510 warning_at (loc, opt,
3511 "%K%qD specified bound %E "
3512 "exceeds maximum object size %E",
3513 exp, func,
3514 range[0], maxobjsize);
3515 else
3516 warning_at (loc, opt,
3517 "%K%qD specified bound between %E and %E "
3518 "exceeds maximum object size %E",
3519 exp, func,
3520 range[0], range[1], maxobjsize);
3522 return false;
3525 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3527 if (TREE_NO_WARNING (exp))
3528 return false;
3530 if (tree_int_cst_equal (range[0], range[1]))
3531 warning_at (loc, opt,
3532 "%K%qD specified bound %E "
3533 "exceeds destination size %E",
3534 exp, func,
3535 range[0], dstsize);
3536 else
3537 warning_at (loc, opt,
3538 "%K%qD specified bound between %E and %E "
3539 "exceeds destination size %E",
3540 exp, func,
3541 range[0], range[1], dstsize);
3542 return false;
3547 /* Check for reading past the end of SRC. */
3548 if (slen
3549 && slen == srcstr
3550 && dstwrite && range[0]
3551 && tree_int_cst_lt (slen, range[0]))
3553 if (TREE_NO_WARNING (exp))
3554 return false;
3556 location_t loc = tree_nonartificial_location (exp);
3558 if (tree_int_cst_equal (range[0], range[1]))
3559 warning_n (loc, opt, tree_to_uhwi (range[0]),
3560 "%K%qD reading %E byte from a region of size %E",
3561 "%K%qD reading %E bytes from a region of size %E",
3562 exp, func, range[0], slen);
3563 else if (tree_int_cst_sign_bit (range[1]))
3565 /* Avoid printing the upper bound if it's invalid. */
3566 warning_at (loc, opt,
3567 "%K%qD reading %E or more bytes from a region "
3568 "of size %E",
3569 exp, func, range[0], slen);
3571 else
3572 warning_at (loc, opt,
3573 "%K%qD reading between %E and %E bytes from a region "
3574 "of size %E",
3575 exp, func, range[0], range[1], slen);
3576 return false;
3579 return true;
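/* Illustration (added in editing, not part of builtins.c): the kind of
   call the checks above diagnose.  Compiled with -O2 -Wstringop-overflow,
   the constant 8-byte write below exceeds the 4-byte destination, so
   check_access issues a warning and returns false:

     #include <string.h>

     char d[4];

     void
     f (const void *p)
     {
       memcpy (d, p, 8);   // writing 8 bytes into a region of size 4
     }
*/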
3582 /* Helper to compute the size of the object referenced by the DEST
3583 expression which must have pointer type, using Object Size type
3584 OSTYPE (only the least significant 2 bits are used). Return
3585 an estimate of the size of the object if successful or NULL when
3586 the size cannot be determined. When the referenced object involves
3587 a non-constant offset in some range the returned value represents
3588 the largest size given the smallest non-negative offset in the
3589 range. The function is intended for diagnostics and should not
3590 be used to influence code generation or optimization. */
3592 tree
3593 compute_objsize (tree dest, int ostype)
3595 unsigned HOST_WIDE_INT size;
3597 /* Only the two least significant bits are meaningful. */
3598 ostype &= 3;
3600 if (compute_builtin_object_size (dest, ostype, &size))
3601 return build_int_cst (sizetype, size);
3603 if (TREE_CODE (dest) == SSA_NAME)
3605 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3606 if (!is_gimple_assign (stmt))
3607 return NULL_TREE;
3609 dest = gimple_assign_rhs1 (stmt);
3611 tree_code code = gimple_assign_rhs_code (stmt);
3612 if (code == POINTER_PLUS_EXPR)
3614 /* compute_builtin_object_size fails for addresses with
3615 non-constant offsets. Try to determine the range of
3616 such an offset here and use it to adjust the constant
3617 size. */
3618 tree off = gimple_assign_rhs2 (stmt);
3619 if (TREE_CODE (off) == INTEGER_CST)
3621 if (tree size = compute_objsize (dest, ostype))
3623 wide_int wioff = wi::to_wide (off);
3624 wide_int wisiz = wi::to_wide (size);
3626 /* Ignore negative offsets for now. For others,
3627 use the lower bound as the most optimistic
3628 estimate of the (remaining) size. */
3629 if (wi::sign_mask (wioff))
3631 else if (wi::ltu_p (wioff, wisiz))
3632 return wide_int_to_tree (TREE_TYPE (size),
3633 wi::sub (wisiz, wioff));
3634 else
3635 return size_zero_node;
3638 else if (TREE_CODE (off) == SSA_NAME
3639 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3641 wide_int min, max;
3642 enum value_range_kind rng = get_range_info (off, &min, &max);
3644 if (rng == VR_RANGE)
3646 if (tree size = compute_objsize (dest, ostype))
3648 wide_int wisiz = wi::to_wide (size);
3650 /* Ignore negative offsets for now. For others,
3651 use the lower bound as the most optimistic
3652 estimate of the (remaining) size. */
3653 if (wi::sign_mask (min)
3654 || wi::sign_mask (max))
3656 else if (wi::ltu_p (min, wisiz))
3657 return wide_int_to_tree (TREE_TYPE (size),
3658 wi::sub (wisiz, min));
3659 else
3660 return size_zero_node;
3665 else if (code != ADDR_EXPR)
3666 return NULL_TREE;
3669 /* Unless computing the largest size (for memcpy and other raw memory
3670 functions), try to determine the size of the object from its type. */
3671 if (!ostype)
3672 return NULL_TREE;
3674 if (TREE_CODE (dest) != ADDR_EXPR)
3675 return NULL_TREE;
3677 tree type = TREE_TYPE (dest);
3678 if (TREE_CODE (type) == POINTER_TYPE)
3679 type = TREE_TYPE (type);
3681 type = TYPE_MAIN_VARIANT (type);
3683 if (TREE_CODE (type) == ARRAY_TYPE
3684 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3686 /* Return the constant size unless it's zero (that's a zero-length
3687 array likely at the end of a struct). */
3688 tree size = TYPE_SIZE_UNIT (type);
3689 if (size && TREE_CODE (size) == INTEGER_CST
3690 && !integer_zerop (size))
3691 return size;
3694 return NULL_TREE;
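/* Illustration (added in editing, not part of builtins.c): the offset
   handling above in source terms.  For a pointer into a known object,
   the reported size is the object size minus the smallest non-negative
   offset:

     char a[8];

     void
     f (int i)
     {
       char *p = a + 3;      // constant offset: remaining size is 8 - 3 == 5
       if (i >= 2 && i <= 6)
         {
           char *q = a + i;  // offset in [2, 6]: the range lower bound
                             // gives the optimistic estimate 8 - 2 == 6
           (void) q;
         }
       (void) p;
     }
*/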
3697 /* Helper to determine and check the sizes of the source and the destination
3698 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3699 call expression, DEST is the destination argument, SRC is the source
3700 argument or null, and LEN is the number of bytes. Use Object Size type-0
3701 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3702 (no overflow or invalid sizes), false otherwise. */
3704 static bool
3705 check_memop_access (tree exp, tree dest, tree src, tree size)
3707 /* For functions like memset and memcpy that operate on raw memory
3708 try to determine the size of the largest source and destination
3709 object using type-0 Object Size regardless of the object size
3710 type specified by the option. */
3711 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3712 tree dstsize = compute_objsize (dest, 0);
3714 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3715 srcsize, dstsize);
3718 /* Validate memchr arguments without performing any expansion.
3719 Return NULL_RTX. */
3721 static rtx
3722 expand_builtin_memchr (tree exp, rtx)
3724 if (!validate_arglist (exp,
3725 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3726 return NULL_RTX;
3728 tree arg1 = CALL_EXPR_ARG (exp, 0);
3729 tree len = CALL_EXPR_ARG (exp, 2);
3731 /* Diagnose calls where the specified length exceeds the size
3732 of the object. */
3733 if (warn_stringop_overflow)
3735 tree size = compute_objsize (arg1, 0);
3736 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3737 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3740 return NULL_RTX;
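/* Illustration (added in editing, not part of builtins.c): with
   -Wstringop-overflow enabled, a bound that exceeds the object is
   expected to be diagnosed as an over-read, e.g.:

     #include <string.h>

     char buf[16];

     void *
     f (void)
     {
       return memchr (buf, 'x', 32);   // bound 32 exceeds the size 16
                                       // of the array being searched
     }
*/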
3743 /* Expand a call EXP to the memcpy builtin.
3744 Return NULL_RTX if we failed (the caller should emit a normal call);
3745 otherwise try to get the result in TARGET, if convenient (and in
3746 mode MODE if that's convenient). */
3748 static rtx
3749 expand_builtin_memcpy (tree exp, rtx target)
3751 if (!validate_arglist (exp,
3752 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3753 return NULL_RTX;
3755 tree dest = CALL_EXPR_ARG (exp, 0);
3756 tree src = CALL_EXPR_ARG (exp, 1);
3757 tree len = CALL_EXPR_ARG (exp, 2);
3759 check_memop_access (exp, dest, src, len);
3761 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3762 /*retmode=*/ RETURN_BEGIN);
3765 /* Check a call EXP to the memmove built-in for validity.
3766 Return NULL_RTX on both success and failure. */
3768 static rtx
3769 expand_builtin_memmove (tree exp, rtx)
3771 if (!validate_arglist (exp,
3772 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3773 return NULL_RTX;
3775 tree dest = CALL_EXPR_ARG (exp, 0);
3776 tree src = CALL_EXPR_ARG (exp, 1);
3777 tree len = CALL_EXPR_ARG (exp, 2);
3779 check_memop_access (exp, dest, src, len);
3781 return NULL_RTX;
3784 /* Expand a call EXP to the mempcpy builtin.
3785 Return NULL_RTX if we failed (the caller should emit a normal call);
3786 otherwise try to get the result in TARGET, if convenient (and in
3787 mode MODE if that's convenient). */
3789 static rtx
3790 expand_builtin_mempcpy (tree exp, rtx target)
3792 if (!validate_arglist (exp,
3793 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3794 return NULL_RTX;
3796 tree dest = CALL_EXPR_ARG (exp, 0);
3797 tree src = CALL_EXPR_ARG (exp, 1);
3798 tree len = CALL_EXPR_ARG (exp, 2);
3800 /* Policy does not generally allow using compute_objsize (which
3801 is used internally by check_memop_access) to change code generation
3802 or drive optimization decisions.
3804 In this instance it is safe because the code we generate has
3805 the same semantics regardless of the return value of
3806 check_memop_access. Exactly the same amount of data is copied
3807 and the return value is exactly the same in both cases.
3809 Furthermore, check_memop_access always uses mode 0 for the call to
3810 compute_objsize, so the imprecise nature of compute_objsize is
3811 avoided. */
3813 /* Avoid expanding mempcpy into memcpy when the call is determined
3814 to overflow the buffer. This also prevents the same overflow
3815 from being diagnosed again when expanding memcpy. */
3816 if (!check_memop_access (exp, dest, src, len))
3817 return NULL_RTX;
3819 return expand_builtin_mempcpy_args (dest, src, len,
3820 target, exp, /*retmode=*/ RETURN_END);
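/* Illustration (added in editing, not part of builtins.c): how the
   memop_ret values correspond to the user-visible return conventions
   of the copy family (mempcpy and stpcpy as in POSIX/glibc):

     #define _GNU_SOURCE   // for mempcpy with glibc
     #include <string.h>

     void
     f (char *d, const char *s, size_t n)
     {
       char *r1 = (char *) memcpy (d, s, n);    // RETURN_BEGIN: d
       char *r2 = (char *) mempcpy (d, s, n);   // RETURN_END: d + n
       char *r3 = stpcpy (d, s);                // RETURN_END_MINUS_ONE:
                                                // d + strlen (s), one
                                                // before the end of the
                                                // strlen (s) + 1 copy
       (void) r1; (void) r2; (void) r3;
     }
*/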
3823 /* Helper function to do the actual work for expanding the memory copy
3824 family functions (memcpy, mempcpy, stpcpy). The expansion copies LEN
3825 bytes of memory from SRC to DEST and assigns the result to TARGET if
3826 convenient. The return value is based on the RETMODE argument. */
3828 static rtx
3829 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3830 rtx target, tree exp, memop_ret retmode)
3832 const char *src_str;
3833 unsigned int src_align = get_pointer_alignment (src);
3834 unsigned int dest_align = get_pointer_alignment (dest);
3835 rtx dest_mem, src_mem, dest_addr, len_rtx;
3836 HOST_WIDE_INT expected_size = -1;
3837 unsigned int expected_align = 0;
3838 unsigned HOST_WIDE_INT min_size;
3839 unsigned HOST_WIDE_INT max_size;
3840 unsigned HOST_WIDE_INT probable_max_size;
3842 /* If DEST is not a pointer type, call the normal function. */
3843 if (dest_align == 0)
3844 return NULL_RTX;
3846 /* If either SRC is not a pointer type, don't do this
3847 operation in-line. */
3848 if (src_align == 0)
3849 return NULL_RTX;
3851 if (currently_expanding_gimple_stmt)
3852 stringop_block_profile (currently_expanding_gimple_stmt,
3853 &expected_align, &expected_size);
3855 if (expected_align < dest_align)
3856 expected_align = dest_align;
3857 dest_mem = get_memory_rtx (dest, len);
3858 set_mem_align (dest_mem, dest_align);
3859 len_rtx = expand_normal (len);
3860 determine_block_size (len, len_rtx, &min_size, &max_size,
3861 &probable_max_size);
3862 src_str = c_getstr (src);
3864 /* If SRC is a string constant and block move would be done
3865 by pieces, we can avoid loading the string from memory
3866 and only store the computed constants. */
3867 if (src_str
3868 && CONST_INT_P (len_rtx)
3869 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3870 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3871 CONST_CAST (char *, src_str),
3872 dest_align, false))
3874 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3875 builtin_memcpy_read_str,
3876 CONST_CAST (char *, src_str),
3877 dest_align, false, retmode);
3878 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3879 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3880 return dest_mem;
3883 src_mem = get_memory_rtx (src, len);
3884 set_mem_align (src_mem, src_align);
3886 /* Copy the memory block most expediently. */
3887 enum block_op_methods method = BLOCK_OP_NORMAL;
3888 if (CALL_EXPR_TAILCALL (exp)
3889 && (retmode == RETURN_BEGIN || target == const0_rtx))
3890 method = BLOCK_OP_TAILCALL;
3891 if (retmode == RETURN_END && target != const0_rtx)
3892 method = BLOCK_OP_NO_LIBCALL_RET;
3893 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3894 expected_align, expected_size,
3895 min_size, max_size, probable_max_size);
3896 if (dest_addr == pc_rtx)
3897 return NULL_RTX;
3899 if (dest_addr == 0)
3901 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3902 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3905 if (retmode != RETURN_BEGIN && target != const0_rtx)
3907 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3908 /* stpcpy wants a pointer to the last byte, not one past it. */
3909 if (retmode == RETURN_END_MINUS_ONE)
3910 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3913 return dest_addr;
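/* Illustration (added in editing; the exact code is target-dependent):
   the store-by-pieces path above at work.  With a short string constant
   and a constant length, no library call and no load from .rodata is
   needed:

     #include <string.h>

     void
     f (char *d)
     {
       memcpy (d, "hi", 3);   // typically expands to immediate stores
                              // of 'h', 'i' and the terminating nul
     }
*/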
3916 static rtx
3917 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3918 rtx target, tree orig_exp, memop_ret retmode)
3920 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3921 retmode);
3924 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3925 we failed (the caller should emit a normal call); otherwise try to
3926 get the result in TARGET, if convenient.
3927 The return value is based on the RETMODE argument. */
3929 static rtx
3930 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3932 struct expand_operand ops[3];
3933 rtx dest_mem;
3934 rtx src_mem;
3936 if (!targetm.have_movstr ())
3937 return NULL_RTX;
3939 dest_mem = get_memory_rtx (dest, NULL);
3940 src_mem = get_memory_rtx (src, NULL);
3941 if (retmode == RETURN_BEGIN)
3943 target = force_reg (Pmode, XEXP (dest_mem, 0));
3944 dest_mem = replace_equiv_address (dest_mem, target);
3947 create_output_operand (&ops[0],
3948 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3949 create_fixed_operand (&ops[1], dest_mem);
3950 create_fixed_operand (&ops[2], src_mem);
3951 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3952 return NULL_RTX;
3954 if (retmode != RETURN_BEGIN && target != const0_rtx)
3956 target = ops[0].value;
3957 /* movstr is supposed to set end to the address of the NUL
3958 terminator. If the caller requested a mempcpy-like return value,
3959 adjust it. */
3960 if (retmode == RETURN_END)
3962 rtx tem = plus_constant (GET_MODE (target),
3963 gen_lowpart (GET_MODE (target), target), 1);
3964 emit_move_insn (target, force_operand (tem, NULL_RTX));
3967 return target;
3970 /* Do some very basic size validation of a call to the strcat builtin
3971 given by EXP. Return NULL_RTX to have the built-in expand to a call
3972 to the library function. */
3974 static rtx
3975 expand_builtin_strcat (tree exp, rtx)
3977 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3978 || !warn_stringop_overflow)
3979 return NULL_RTX;
3981 tree dest = CALL_EXPR_ARG (exp, 0);
3982 tree src = CALL_EXPR_ARG (exp, 1);
3984 /* There is no way here to determine the length of the string in
3985 the destination to which the SRC string is being appended, so
3986 just diagnose cases when the source string is longer than
3987 the destination object. */
3989 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3991 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3992 destsize);
3994 return NULL_RTX;
3997 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3998 NULL_RTX if we failed (the caller should emit a normal call); otherwise
3999 try to get the result in TARGET, if convenient (and in mode MODE if that's
4000 convenient). */
4002 static rtx
4003 expand_builtin_strcpy (tree exp, rtx target)
4005 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4006 return NULL_RTX;
4008 tree dest = CALL_EXPR_ARG (exp, 0);
4009 tree src = CALL_EXPR_ARG (exp, 1);
4011 if (warn_stringop_overflow)
4013 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4014 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4015 src, destsize);
4018 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4020 /* Check to see if the argument was declared attribute nonstring
4021 and if so, issue a warning since at this point it's not known
4022 to be nul-terminated. */
4023 tree fndecl = get_callee_fndecl (exp);
4024 maybe_warn_nonstring_arg (fndecl, exp);
4025 return ret;
4028 return NULL_RTX;
4031 /* Helper function to do the actual work for expand_builtin_strcpy. The
4032 arguments to the builtin_strcpy call DEST and SRC are broken out
4033 so that this can also be called without constructing an actual CALL_EXPR.
4034 The other arguments and return value are the same as for
4035 expand_builtin_strcpy. */
4037 static rtx
4038 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4040 /* Detect strcpy calls with unterminated arrays. */
4041 if (tree nonstr = unterminated_array (src))
4043 /* NONSTR refers to the non-nul terminated constant array. */
4044 if (!TREE_NO_WARNING (exp))
4045 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4046 return NULL_RTX;
4049 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4052 /* Expand a call EXP to the stpcpy builtin.
4053 Return NULL_RTX if we failed (the caller should emit a normal call);
4054 otherwise try to get the result in TARGET, if convenient (and in
4055 mode MODE if that's convenient). */
4057 static rtx
4058 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4060 tree dst, src;
4061 location_t loc = EXPR_LOCATION (exp);
4063 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4064 return NULL_RTX;
4066 dst = CALL_EXPR_ARG (exp, 0);
4067 src = CALL_EXPR_ARG (exp, 1);
4069 if (warn_stringop_overflow)
4071 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4072 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4073 src, destsize);
4076 /* If the return value is ignored, transform stpcpy into strcpy. */
4077 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4079 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4080 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4081 return expand_expr (result, target, mode, EXPAND_NORMAL);
4083 else
4085 tree len, lenp1;
4086 rtx ret;
4088 /* Ensure we get an actual string whose length can be evaluated at
4089 compile-time, not an expression containing a string. This is
4090 because the latter will potentially produce pessimized code
4091 when used to produce the return value. */
4092 c_strlen_data lendata = { };
4093 if (!c_getstr (src, NULL)
4094 || !(len = c_strlen (src, 0, &lendata, 1)))
4095 return expand_movstr (dst, src, target,
4096 /*retmode=*/ RETURN_END_MINUS_ONE);
4098 if (lendata.decl && !TREE_NO_WARNING (exp))
4099 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4101 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4102 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4103 target, exp,
4104 /*retmode=*/ RETURN_END_MINUS_ONE);
4106 if (ret)
4107 return ret;
4109 if (TREE_CODE (len) == INTEGER_CST)
4111 rtx len_rtx = expand_normal (len);
4113 if (CONST_INT_P (len_rtx))
4115 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4117 if (ret)
4119 if (! target)
4121 if (mode != VOIDmode)
4122 target = gen_reg_rtx (mode);
4123 else
4124 target = gen_reg_rtx (GET_MODE (ret));
4126 if (GET_MODE (target) != GET_MODE (ret))
4127 ret = gen_lowpart (GET_MODE (target), ret);
4129 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4130 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4131 gcc_assert (ret);
4133 return target;
4138 return expand_movstr (dst, src, target,
4139 /*retmode=*/ RETURN_END_MINUS_ONE);
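/* Illustration (added in editing, not part of builtins.c): the two
   stpcpy transformations above in user terms:

     #include <string.h>

     void
     g (char *d, const char *s)
     {
       stpcpy (d, s);              // result unused: expanded as strcpy
     }

     char *
     h (char *d)
     {
       return stpcpy (d, "abc");   // constant source of length 3: may be
                                   // expanded as mempcpy (d, "abc", 4)
                                   // with the result adjusted to d + 3
     }
*/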
4143 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4144 arguments while being careful to avoid duplicate warnings (which could
4145 be issued if the expander were to expand the call, resulting in it
4146 being emitted in expand_call ()). */
4148 static rtx
4149 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4151 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4153 /* The call has been successfully expanded. Check for nonstring
4154 arguments and issue warnings as appropriate. */
4155 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4156 return ret;
4159 return NULL_RTX;
4162 /* Check a call EXP to the stpncpy built-in for validity.
4163 Return NULL_RTX on both success and failure. */
4165 static rtx
4166 expand_builtin_stpncpy (tree exp, rtx)
4168 if (!validate_arglist (exp,
4169 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4170 || !warn_stringop_overflow)
4171 return NULL_RTX;
4173 /* The source and destination of the call. */
4174 tree dest = CALL_EXPR_ARG (exp, 0);
4175 tree src = CALL_EXPR_ARG (exp, 1);
4177 /* The exact number of bytes to write (not the maximum). */
4178 tree len = CALL_EXPR_ARG (exp, 2);
4180 /* The size of the destination object. */
4181 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4183 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4185 return NULL_RTX;
4188 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4189 bytes from constant string DATA + OFFSET and return it as target
4190 constant. */
4192 static rtx
4193 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4194 scalar_int_mode mode)
4196 const char *str = (const char *) data;
4198 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4199 return const0_rtx;
4201 return c_readstr (str + offset, mode);
4204 /* Helper to check the sizes of sequences and the destination of calls
4205 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4206 success (no overflow or invalid sizes), false otherwise. */
4208 static bool
4209 check_strncat_sizes (tree exp, tree objsize)
4211 tree dest = CALL_EXPR_ARG (exp, 0);
4212 tree src = CALL_EXPR_ARG (exp, 1);
4213 tree maxread = CALL_EXPR_ARG (exp, 2);
4215 /* Try to determine the range of lengths that the source expression
4216 refers to. */
4217 c_strlen_data lendata = { };
4218 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4220 /* Try to verify that the destination is big enough for the shortest
4221 string. */
4223 if (!objsize && warn_stringop_overflow)
4225 /* If it hasn't been provided by __strncat_chk, try to determine
4226 the size of the destination object into which the source is
4227 being copied. */
4228 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4231 /* Add one for the terminating nul. */
4232 tree srclen = (lendata.minlen
4233 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4234 size_one_node)
4235 : NULL_TREE);
4237 /* The strncat function copies at most MAXREAD bytes and always appends
4238 the terminating nul so the specified upper bound should never be equal
4239 to (or greater than) the size of the destination. */
4240 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4241 && tree_int_cst_equal (objsize, maxread))
4243 location_t loc = tree_nonartificial_location (exp);
4244 loc = expansion_point_location_if_in_system_header (loc);
4246 warning_at (loc, OPT_Wstringop_overflow_,
4247 "%K%qD specified bound %E equals destination size",
4248 exp, get_callee_fndecl (exp), maxread);
4250 return false;
4253 if (!srclen
4254 || (maxread && tree_fits_uhwi_p (maxread)
4255 && tree_fits_uhwi_p (srclen)
4256 && tree_int_cst_lt (maxread, srclen)))
4257 srclen = maxread;
4259 /* The number of bytes to write is LEN but check_access will also
4260 check SRCLEN if LEN's value isn't known. */
4261 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4262 objsize);
4265 /* Similar to expand_builtin_strcat, do some very basic size validation
4266 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4267 the built-in expand to a call to the library function. */
4269 static rtx
4270 expand_builtin_strncat (tree exp, rtx)
4272 if (!validate_arglist (exp,
4273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4274 || !warn_stringop_overflow)
4275 return NULL_RTX;
4277 tree dest = CALL_EXPR_ARG (exp, 0);
4278 tree src = CALL_EXPR_ARG (exp, 1);
4279 /* The upper bound on the number of bytes to write. */
4280 tree maxread = CALL_EXPR_ARG (exp, 2);
4281 /* The length of the source sequence. */
4282 tree slen = c_strlen (src, 1);
4284 /* Try to determine the range of lengths that the source expression
4285 refers to. Since the lengths are only used for warning and not
4286 for code generation, disable strict mode below. */
4287 tree maxlen = slen;
4288 if (!maxlen)
4290 c_strlen_data lendata = { };
4291 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4292 maxlen = lendata.maxbound;
4295 /* Try to verify that the destination is big enough for the shortest
4296 string. First try to determine the size of the destination object
4297 into which the source is being copied. */
4298 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4300 /* Add one for the terminating nul. */
4301 tree srclen = (maxlen
4302 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4303 size_one_node)
4304 : NULL_TREE);
4306 /* The strncat function copies at most MAXREAD bytes and always appends
4307 the terminating nul so the specified upper bound should never be equal
4308 to (or greater than) the size of the destination. */
4309 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4310 && tree_int_cst_equal (destsize, maxread))
4312 location_t loc = tree_nonartificial_location (exp);
4313 loc = expansion_point_location_if_in_system_header (loc);
4315 warning_at (loc, OPT_Wstringop_overflow_,
4316 "%K%qD specified bound %E equals destination size",
4317 exp, get_callee_fndecl (exp), maxread);
4319 return NULL_RTX;
4322 if (!srclen
4323 || (maxread && tree_fits_uhwi_p (maxread)
4324 && tree_fits_uhwi_p (srclen)
4325 && tree_int_cst_lt (maxread, srclen)))
4326 srclen = maxread;
4328 /* The number of bytes to write is SRCLEN. */
4329 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4331 return NULL_RTX;
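/* Illustration (added in editing, not part of builtins.c): the bound
   check above fires when the strncat bound equals the destination size,
   since strncat always appends a nul after at most MAXREAD characters:

     #include <string.h>

     char d[32];

     void
     f (const char *s)
     {
       strncat (d, s, sizeof d);   // -Wstringop-overflow: specified
                                   // bound 32 equals destination size
     }
*/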
4334 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4335 NULL_RTX if we failed; the caller should emit a normal call. */
4337 static rtx
4338 expand_builtin_strncpy (tree exp, rtx target)
4340 location_t loc = EXPR_LOCATION (exp);
4342 if (validate_arglist (exp,
4343 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4345 tree dest = CALL_EXPR_ARG (exp, 0);
4346 tree src = CALL_EXPR_ARG (exp, 1);
4347 /* The number of bytes to write (not the maximum). */
4348 tree len = CALL_EXPR_ARG (exp, 2);
4349 /* The length of the source sequence. */
4350 tree slen = c_strlen (src, 1);
4352 if (warn_stringop_overflow)
4354 tree destsize = compute_objsize (dest,
4355 warn_stringop_overflow - 1);
4357 /* The number of bytes to write is LEN but check_access will also
4358 check SLEN if LEN's value isn't known. */
4359 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4360 destsize);
4363 /* We must be passed a constant len and src parameter. */
4364 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4365 return NULL_RTX;
4367 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4369 /* We're required to pad with trailing zeros if the requested
4370 len is greater than strlen(s2)+1. In that case try to
4371 use store_by_pieces; if it fails, punt. */
4372 if (tree_int_cst_lt (slen, len))
4374 unsigned int dest_align = get_pointer_alignment (dest);
4375 const char *p = c_getstr (src);
4376 rtx dest_mem;
4378 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4379 || !can_store_by_pieces (tree_to_uhwi (len),
4380 builtin_strncpy_read_str,
4381 CONST_CAST (char *, p),
4382 dest_align, false))
4383 return NULL_RTX;
4385 dest_mem = get_memory_rtx (dest, len);
4386 store_by_pieces (dest_mem, tree_to_uhwi (len),
4387 builtin_strncpy_read_str,
4388 CONST_CAST (char *, p), dest_align, false,
4389 RETURN_BEGIN);
4390 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4391 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4392 return dest_mem;
4395 return NULL_RTX;
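/* Illustration (added in editing, not part of builtins.c): the padding
   path above.  When the constant bound exceeds strlen (SRC) + 1,
   strncpy must zero-fill the remainder, and the whole padded image can
   be emitted by store_by_pieces:

     #include <string.h>

     void
     f (char *d)
     {
       strncpy (d, "ab", 8);   // writes 'a', 'b' and six nul bytes
     }
*/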
4398 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4399 bytes from constant string DATA + OFFSET and return it as target
4400 constant. */
4402 static rtx
4403 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4404 scalar_int_mode mode)
4406 const char *c = (const char *) data;
4407 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4409 memset (p, *c, GET_MODE_SIZE (mode));
4411 return c_readstr (p, mode);
4414 /* Callback routine for store_by_pieces. Return the RTL of a register
4415 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4416 char value given in the RTL register data. For example, if mode is
4417 4 bytes wide, return the RTL for 0x01010101*data. */
4419 static rtx
4420 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4421 scalar_int_mode mode)
4423 rtx target, coeff;
4424 size_t size;
4425 char *p;
4427 size = GET_MODE_SIZE (mode);
4428 if (size == 1)
4429 return (rtx) data;
4431 p = XALLOCAVEC (char, size);
4432 memset (p, 1, size);
4433 coeff = c_readstr (p, mode);
4435 target = convert_to_mode (mode, (rtx) data, 1);
4436 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4437 return force_reg (mode, target);
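/* Sketch (added in editing, not part of builtins.c) of the replication
   trick used above: multiplying a byte value by a constant with a one
   in every byte lane copies the byte across the whole word:

     #include <stdint.h>

     // Mirror builtin_memset_gen_str for a 4-byte mode.
     static uint32_t
     replicate4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;   // 0xAB -> 0xABABABAB
     }
*/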
4440 /* Expand expression EXP, which is a call to the memset builtin. Return
4441 NULL_RTX if we failed (the caller should emit a normal call); otherwise
4442 try to get the result in TARGET, if convenient (and in mode MODE if that's
4443 convenient). */
4445 static rtx
4446 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4448 if (!validate_arglist (exp,
4449 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4450 return NULL_RTX;
4452 tree dest = CALL_EXPR_ARG (exp, 0);
4453 tree val = CALL_EXPR_ARG (exp, 1);
4454 tree len = CALL_EXPR_ARG (exp, 2);
4456 check_memop_access (exp, dest, NULL_TREE, len);
4458 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4461 /* Helper function to do the actual work for expand_builtin_memset. The
4462 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4463 so that this can also be called without constructing an actual CALL_EXPR.
4464 The other arguments and return value are the same as for
4465 expand_builtin_memset. */
4467 static rtx
4468 expand_builtin_memset_args (tree dest, tree val, tree len,
4469 rtx target, machine_mode mode, tree orig_exp)
4471 tree fndecl, fn;
4472 enum built_in_function fcode;
4473 machine_mode val_mode;
4474 char c;
4475 unsigned int dest_align;
4476 rtx dest_mem, dest_addr, len_rtx;
4477 HOST_WIDE_INT expected_size = -1;
4478 unsigned int expected_align = 0;
4479 unsigned HOST_WIDE_INT min_size;
4480 unsigned HOST_WIDE_INT max_size;
4481 unsigned HOST_WIDE_INT probable_max_size;
4483 dest_align = get_pointer_alignment (dest);
4485 /* If DEST is not a pointer type, don't do this operation in-line. */
4486 if (dest_align == 0)
4487 return NULL_RTX;
4489 if (currently_expanding_gimple_stmt)
4490 stringop_block_profile (currently_expanding_gimple_stmt,
4491 &expected_align, &expected_size);
4493 if (expected_align < dest_align)
4494 expected_align = dest_align;
4496 /* If the LEN parameter is zero, return DEST. */
4497 if (integer_zerop (len))
4499 /* Evaluate and ignore VAL in case it has side-effects. */
4500 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4501 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4504 /* Stabilize the arguments in case we fail. */
4505 dest = builtin_save_expr (dest);
4506 val = builtin_save_expr (val);
4507 len = builtin_save_expr (len);
4509 len_rtx = expand_normal (len);
4510 determine_block_size (len, len_rtx, &min_size, &max_size,
4511 &probable_max_size);
4512 dest_mem = get_memory_rtx (dest, len);
4513 val_mode = TYPE_MODE (unsigned_char_type_node);
4515 if (TREE_CODE (val) != INTEGER_CST)
4517 rtx val_rtx;
4519 val_rtx = expand_normal (val);
4520 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4522 /* Assume that we can memset by pieces if we can store
4523 the coefficients by pieces (in the required modes).
4524 We can't pass builtin_memset_gen_str as that emits RTL. */
4525 c = 1;
4526 if (tree_fits_uhwi_p (len)
4527 && can_store_by_pieces (tree_to_uhwi (len),
4528 builtin_memset_read_str, &c, dest_align,
4529 true))
4531 val_rtx = force_reg (val_mode, val_rtx);
4532 store_by_pieces (dest_mem, tree_to_uhwi (len),
4533 builtin_memset_gen_str, val_rtx, dest_align,
4534 true, RETURN_BEGIN);
4536 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4537 dest_align, expected_align,
4538 expected_size, min_size, max_size,
4539 probable_max_size))
4540 goto do_libcall;
4542 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4543 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4544 return dest_mem;
4547 if (target_char_cast (val, &c))
4548 goto do_libcall;
4550 if (c)
4552 if (tree_fits_uhwi_p (len)
4553 && can_store_by_pieces (tree_to_uhwi (len),
4554 builtin_memset_read_str, &c, dest_align,
4555 true))
4556 store_by_pieces (dest_mem, tree_to_uhwi (len),
4557 builtin_memset_read_str, &c, dest_align, true,
4558 RETURN_BEGIN);
4559 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4560 gen_int_mode (c, val_mode),
4561 dest_align, expected_align,
4562 expected_size, min_size, max_size,
4563 probable_max_size))
4564 goto do_libcall;
4566 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4567 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4568 return dest_mem;
4571 set_mem_align (dest_mem, dest_align);
4572 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4573 CALL_EXPR_TAILCALL (orig_exp)
4574 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4575 expected_align, expected_size,
4576 min_size, max_size,
4577 probable_max_size);
4579 if (dest_addr == 0)
4581 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4582 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4585 return dest_addr;
4587 do_libcall:
4588 fndecl = get_callee_fndecl (orig_exp);
4589 fcode = DECL_FUNCTION_CODE (fndecl);
4590 if (fcode == BUILT_IN_MEMSET)
4591 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4592 dest, val, len);
4593 else if (fcode == BUILT_IN_BZERO)
4594 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4595 dest, len);
4596 else
4597 gcc_unreachable ();
4598 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4599 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4600 return expand_call (fn, target, target == const0_rtx);
4603 /* Expand expression EXP, which is a call to the bzero builtin. Return
4604 NULL_RTX if we failed; the caller should emit a normal call. */
4606 static rtx
4607 expand_builtin_bzero (tree exp)
4609 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4610 return NULL_RTX;
4612 tree dest = CALL_EXPR_ARG (exp, 0);
4613 tree size = CALL_EXPR_ARG (exp, 1);
4615 check_memop_access (exp, dest, NULL_TREE, size);
4617 /* New argument list transforming bzero(ptr x, int y) to
4618 memset(ptr x, int 0, size_t y). This is done this way
4619 so that if it isn't expanded inline, we fall back to
4620 calling bzero instead of memset. */
4622 location_t loc = EXPR_LOCATION (exp);
4624 return expand_builtin_memset_args (dest, integer_zero_node,
4625 fold_convert_loc (loc,
4626 size_type_node, size),
4627 const0_rtx, VOIDmode, exp);
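/* Illustration (added in editing, not part of builtins.c): the argument
   rewrite above in source terms:

     #include <strings.h>

     void
     f (void *p, unsigned n)
     {
       bzero (p, n);   // expanded as if memset (p, 0, (size_t) n);
                       // an out-of-line fallback still calls bzero
     }
*/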
4630 /* Try to expand cmpstr operation ICODE with the given operands.
4631 Return the result rtx on success, otherwise return null. */
4633 static rtx
4634 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4635 HOST_WIDE_INT align)
4637 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4639 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4640 target = NULL_RTX;
4642 struct expand_operand ops[4];
4643 create_output_operand (&ops[0], target, insn_mode);
4644 create_fixed_operand (&ops[1], arg1_rtx);
4645 create_fixed_operand (&ops[2], arg2_rtx);
4646 create_integer_operand (&ops[3], align);
4647 if (maybe_expand_insn (icode, 4, ops))
4648 return ops[0].value;
4649 return NULL_RTX;
4652 /* Expand expression EXP, which is a call to the memcmp built-in function.
4653 Return NULL_RTX if we failed and the caller should emit a normal call,
4654 otherwise try to get the result in TARGET, if convenient.
4655 RESULT_EQ is true if we can relax the returned value to be either zero
4656 or nonzero, without caring about the sign. */
4658 static rtx
4659 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4661 if (!validate_arglist (exp,
4662 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4663 return NULL_RTX;
4665 tree arg1 = CALL_EXPR_ARG (exp, 0);
4666 tree arg2 = CALL_EXPR_ARG (exp, 1);
4667 tree len = CALL_EXPR_ARG (exp, 2);
4668 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4669 bool no_overflow = true;
4671 /* Diagnose calls where the specified length exceeds the size of either
4672 object. */
4673 tree size = compute_objsize (arg1, 0);
4674 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4675 len, /*maxread=*/NULL_TREE, size,
4676 /*objsize=*/NULL_TREE);
4677 if (no_overflow)
4679 size = compute_objsize (arg2, 0);
4680 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4681 len, /*maxread=*/NULL_TREE, size,
4682 /*objsize=*/NULL_TREE);
4685 /* If the specified length exceeds the size of either object,
4686 call the function. */
4687 if (!no_overflow)
4688 return NULL_RTX;
4690 /* Due to the performance benefit, always inline the calls first
4691 when result_eq is false. */
4692 rtx result = NULL_RTX;
4694 if (!result_eq && fcode != BUILT_IN_BCMP)
4696 result = inline_expand_builtin_string_cmp (exp, target);
4697 if (result)
4698 return result;
4701 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4702 location_t loc = EXPR_LOCATION (exp);
4704 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4705 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4707 /* If we don't have POINTER_TYPE, call the function. */
4708 if (arg1_align == 0 || arg2_align == 0)
4709 return NULL_RTX;
4711 rtx arg1_rtx = get_memory_rtx (arg1, len);
4712 rtx arg2_rtx = get_memory_rtx (arg2, len);
4713 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4715 /* Set MEM_SIZE as appropriate. */
4716 if (CONST_INT_P (len_rtx))
4718 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4719 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4722 by_pieces_constfn constfn = NULL;
4724 const char *src_str = c_getstr (arg2);
4725 if (result_eq && src_str == NULL)
4727 src_str = c_getstr (arg1);
4728 if (src_str != NULL)
4729 std::swap (arg1_rtx, arg2_rtx);
4732 /* If SRC is a string constant and block move would be done
4733 by pieces, we can avoid loading the string from memory
4734 and only store the computed constants. */
4735 if (src_str
4736 && CONST_INT_P (len_rtx)
4737 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4738 constfn = builtin_memcpy_read_str;
4740 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4741 TREE_TYPE (len), target,
4742 result_eq, constfn,
4743 CONST_CAST (char *, src_str));
4745 if (result)
4747 /* Return the value in the proper mode for this function. */
4748 if (GET_MODE (result) == mode)
4749 return result;
4751 if (target != 0)
4753 convert_move (target, result, 0);
4754 return target;
4757 return convert_to_mode (mode, result, 0);
4760 return NULL_RTX;
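/* Illustration (added in editing, not part of builtins.c): RESULT_EQ
   corresponds to uses where only equality is observed, which lets the
   expander compare blocks without computing an ordered result:

     #include <string.h>
     #include <stdbool.h>

     bool
     same8 (const void *a, const void *b)
     {
       return memcmp (a, b, 8) == 0;   // zero/nonzero is enough; the
                                       // sign of the result is ignored
     }
*/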
4763 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4764 if we failed (the caller should emit a normal call); otherwise try to get
4765 the result in TARGET, if convenient. */
4767 static rtx
4768 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4770 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4771 return NULL_RTX;
4773 /* Due to the performance benefit, always inline the calls first. */
4774 rtx result = NULL_RTX;
4775 result = inline_expand_builtin_string_cmp (exp, target);
4776 if (result)
4777 return result;
4779 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4780 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4781 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4782 return NULL_RTX;
4784 tree arg1 = CALL_EXPR_ARG (exp, 0);
4785 tree arg2 = CALL_EXPR_ARG (exp, 1);
4787 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4788 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4790 /* If we don't have POINTER_TYPE, call the function. */
4791 if (arg1_align == 0 || arg2_align == 0)
4792 return NULL_RTX;
4794 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4795 arg1 = builtin_save_expr (arg1);
4796 arg2 = builtin_save_expr (arg2);
4798 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4799 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4801 /* Try to call cmpstrsi. */
4802 if (cmpstr_icode != CODE_FOR_nothing)
4803 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4804 MIN (arg1_align, arg2_align));
4806 /* Try to determine at least one length and call cmpstrnsi. */
4807 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4809 tree len;
4810 rtx arg3_rtx;
4812 tree len1 = c_strlen (arg1, 1);
4813 tree len2 = c_strlen (arg2, 1);
4815 if (len1)
4816 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4817 if (len2)
4818 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4820 /* If we don't have a constant length for the first, use the length
4821 of the second, if we know it. We don't require a constant for
4822 this case; some cost analysis could be done if both are available
4823 but neither is constant. For now, assume they're equally cheap,
4824 unless one has side effects. If both strings have constant lengths,
4825 use the smaller. */
4827 if (!len1)
4828 len = len2;
4829 else if (!len2)
4830 len = len1;
4831 else if (TREE_SIDE_EFFECTS (len1))
4832 len = len2;
4833 else if (TREE_SIDE_EFFECTS (len2))
4834 len = len1;
4835 else if (TREE_CODE (len1) != INTEGER_CST)
4836 len = len2;
4837 else if (TREE_CODE (len2) != INTEGER_CST)
4838 len = len1;
4839 else if (tree_int_cst_lt (len1, len2))
4840 len = len1;
4841 else
4842 len = len2;
4844 /* If both arguments have side effects, we cannot optimize. */
4845 if (len && !TREE_SIDE_EFFECTS (len))
4847 arg3_rtx = expand_normal (len);
4848 result = expand_cmpstrn_or_cmpmem
4849 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4850 arg3_rtx, MIN (arg1_align, arg2_align));
4854 tree fndecl = get_callee_fndecl (exp);
4855 if (result)
4857 /* Check to see if the argument was declared attribute nonstring
4858 and if so, issue a warning since at this point it's not known
4859 to be nul-terminated. */
4860 maybe_warn_nonstring_arg (fndecl, exp);
4862 /* Return the value in the proper mode for this function. */
4863 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4864 if (GET_MODE (result) == mode)
4865 return result;
4866 if (target == 0)
4867 return convert_to_mode (mode, result, 0);
4868 convert_move (target, result, 0);
4869 return target;
4872 /* Expand the library call ourselves using a stabilized argument
4873 list to avoid re-evaluating the function's arguments twice. */
4874 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4875 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4876 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4877 return expand_call (fn, target, target == const0_rtx);
4880 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4881 NULL_RTX if we failed (the caller should emit a normal call); otherwise try to get
4882 the result in TARGET, if convenient. */
4884 static rtx
4885 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4886 ATTRIBUTE_UNUSED machine_mode mode)
4888 if (!validate_arglist (exp,
4889 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4890 return NULL_RTX;
4892 /* Due to the performance benefit, always inline the calls first. */
4893 rtx result = NULL_RTX;
4894 result = inline_expand_builtin_string_cmp (exp, target);
4895 if (result)
4896 return result;
4898 /* If c_strlen can determine an expression for one of the string
4899 lengths, and it doesn't have side effects, then emit cmpstrnsi
4900 using length MIN(strlen(string)+1, arg3). */
4901 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4902 if (cmpstrn_icode == CODE_FOR_nothing)
4903 return NULL_RTX;
4905 tree len;
4907 tree arg1 = CALL_EXPR_ARG (exp, 0);
4908 tree arg2 = CALL_EXPR_ARG (exp, 1);
4909 tree arg3 = CALL_EXPR_ARG (exp, 2);
4911 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4912 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4914 tree len1 = c_strlen (arg1, 1);
4915 tree len2 = c_strlen (arg2, 1);
4917 location_t loc = EXPR_LOCATION (exp);
4919 if (len1)
4920 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4921 if (len2)
4922 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4924 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4926 /* If we don't have a constant length for the first, use the length
4927 of the second, if we know it. If neither string is constant length,
4928 use the given length argument. We don't require a constant for
4929 this case; some cost analysis could be done if both are available
4930 but neither is constant. For now, assume they're equally cheap,
4931 unless one has side effects. If both strings have constant lengths,
4932 use the smaller. */
4934 if (!len1 && !len2)
4935 len = len3;
4936 else if (!len1)
4937 len = len2;
4938 else if (!len2)
4939 len = len1;
4940 else if (TREE_SIDE_EFFECTS (len1))
4941 len = len2;
4942 else if (TREE_SIDE_EFFECTS (len2))
4943 len = len1;
4944 else if (TREE_CODE (len1) != INTEGER_CST)
4945 len = len2;
4946 else if (TREE_CODE (len2) != INTEGER_CST)
4947 len = len1;
4948 else if (tree_int_cst_lt (len1, len2))
4949 len = len1;
4950 else
4951 len = len2;
4953 /* If we are not using the given length, we must incorporate it here.
4954 The actual new length parameter will be MIN(len,arg3) in this case. */
4955 if (len != len3)
4957 len = fold_convert_loc (loc, sizetype, len);
4958 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4960 rtx arg1_rtx = get_memory_rtx (arg1, len);
4961 rtx arg2_rtx = get_memory_rtx (arg2, len);
4962 rtx arg3_rtx = expand_normal (len);
4963 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4964 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4965 MIN (arg1_align, arg2_align));
4967 tree fndecl = get_callee_fndecl (exp);
4968 if (result)
4970 /* Check to see if the argument was declared attribute nonstring
4971 and if so, issue a warning since at this point it's not known
4972 to be nul-terminated. */
4973 maybe_warn_nonstring_arg (fndecl, exp);
4975 /* Return the value in the proper mode for this function. */
4976 mode = TYPE_MODE (TREE_TYPE (exp));
4977 if (GET_MODE (result) == mode)
4978 return result;
4979 if (target == 0)
4980 return convert_to_mode (mode, result, 0);
4981 convert_move (target, result, 0);
4982 return target;
4985 /* Expand the library call ourselves using a stabilized argument
4986 list to avoid re-evaluating the function's arguments twice. */
4987 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4988 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4989 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4990 return expand_call (fn, target, target == const0_rtx);
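/* Illustration (added in editing, not part of builtins.c): the length
   selection above.  With one string of constant length, the comparison
   length becomes MIN (strlen (s2) + 1, n):

     #include <string.h>

     int
     f (const char *s, size_t n)
     {
       // len2 = strlen ("abc") + 1 == 4 is constant, so cmpstrnsi is
       // emitted with length MIN (4, n); bytes past the terminating
       // nul cannot change the result.
       return strncmp (s, "abc", n);
     }
*/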
4993 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4994 if that's convenient. */
4996 rtx
4997 expand_builtin_saveregs (void)
4999 rtx val;
5000 rtx_insn *seq;
5002 /* Don't do __builtin_saveregs more than once in a function.
5003 Save the result of the first call and reuse it. */
5004 if (saveregs_value != 0)
5005 return saveregs_value;
5007 /* When this function is called, it means that registers must be
5008 saved on entry to this function. So we migrate the call to the
5009 first insn of this function. */
5011 start_sequence ();
5013 /* Do whatever the machine needs done in this case. */
5014 val = targetm.calls.expand_builtin_saveregs ();
5016 seq = get_insns ();
5017 end_sequence ();
5019 saveregs_value = val;
5021 /* Put the insns after the NOTE that starts the function. If this
5022 is inside a start_sequence, make the outer-level insn chain current, so
5023 the code is placed at the start of the function. */
5024 push_topmost_sequence ();
5025 emit_insn_after (seq, entry_of_function ());
5026 pop_topmost_sequence ();
5028 return val;
5031 /* Expand a call to __builtin_next_arg. */
5033 static rtx
5034 expand_builtin_next_arg (void)
5036 /* Checking arguments is already done in fold_builtin_next_arg,
5037 which must be called before this function. */
5038 return expand_binop (ptr_mode, add_optab,
5039 crtl->args.internal_arg_pointer,
5040 crtl->args.arg_offset_rtx,
5041 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5044 /* Make it easier for the backends by protecting the valist argument
5045 from multiple evaluations. */
5047 static tree
5048 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5050 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5052 /* The current way of determining the type of valist is completely
5053 bogus. We should have the information on the va builtin instead. */
5054 if (!vatype)
5055 vatype = targetm.fn_abi_va_list (cfun->decl);
5057 if (TREE_CODE (vatype) == ARRAY_TYPE)
5059 if (TREE_SIDE_EFFECTS (valist))
5060 valist = save_expr (valist);
5062 /* For this case, the backends will be expecting a pointer to
5063 vatype, but it's possible we've actually been given an array
5064 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5065 So fix it. */
5066 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5068 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5069 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5072 else
5074 tree pt = build_pointer_type (vatype);
5076 if (! needs_lvalue)
5078 if (! TREE_SIDE_EFFECTS (valist))
5079 return valist;
5081 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5082 TREE_SIDE_EFFECTS (valist) = 1;
5085 if (TREE_SIDE_EFFECTS (valist))
5086 valist = save_expr (valist);
5087 valist = fold_build2_loc (loc, MEM_REF,
5088 vatype, valist, build_int_cst (pt, 0));
5091 return valist;
5094 /* The "standard" definition of va_list is void*. */
5096 tree
5097 std_build_builtin_va_list (void)
5099 return ptr_type_node;
5102 /* The "standard" abi va_list is va_list_type_node. */
5104 tree
5105 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5107 return va_list_type_node;
5110 /* The "standard" type of va_list is va_list_type_node. */
5112 tree
5113 std_canonical_va_list_type (tree type)
5115 tree wtype, htype;
5117 wtype = va_list_type_node;
5118 htype = type;
5120 if (TREE_CODE (wtype) == ARRAY_TYPE)
5122 /* If va_list is an array type, the argument may have decayed
5123 to a pointer type, e.g. by being passed to another function.
5124 In that case, unwrap both types so that we can compare the
5125 underlying records. */
5126 if (TREE_CODE (htype) == ARRAY_TYPE
5127 || POINTER_TYPE_P (htype))
5129 wtype = TREE_TYPE (wtype);
5130 htype = TREE_TYPE (htype);
5133 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5134 return va_list_type_node;
5136 return NULL_TREE;
5139 /* The "standard" implementation of va_start: just assign `nextarg' to
5140 the variable. */
5142 void
5143 std_expand_builtin_va_start (tree valist, rtx nextarg)
5145 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5146 convert_move (va_r, nextarg, 0);
5149 /* Expand EXP, a call to __builtin_va_start. */
5151 static rtx
5152 expand_builtin_va_start (tree exp)
5154 rtx nextarg;
5155 tree valist;
5156 location_t loc = EXPR_LOCATION (exp);
5158 if (call_expr_nargs (exp) < 2)
5160 error_at (loc, "too few arguments to function %<va_start%>");
5161 return const0_rtx;
5164 if (fold_builtin_next_arg (exp, true))
5165 return const0_rtx;
5167 nextarg = expand_builtin_next_arg ();
5168 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5170 if (targetm.expand_builtin_va_start)
5171 targetm.expand_builtin_va_start (valist, nextarg);
5172 else
5173 std_expand_builtin_va_start (valist, nextarg);
5175 return const0_rtx;
5178 /* Expand EXP, a call to __builtin_va_end. */
5180 static rtx
5181 expand_builtin_va_end (tree exp)
5183 tree valist = CALL_EXPR_ARG (exp, 0);
5185 /* Evaluate for side effects, if needed. I hate macros that don't
5186 do that. */
5187 if (TREE_SIDE_EFFECTS (valist))
5188 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5190 return const0_rtx;
5193 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5194 builtin rather than just as an assignment in stdarg.h because of the
5195 nastiness of array-type va_list types. */
5197 static rtx
5198 expand_builtin_va_copy (tree exp)
5200 tree dst, src, t;
5201 location_t loc = EXPR_LOCATION (exp);
5203 dst = CALL_EXPR_ARG (exp, 0);
5204 src = CALL_EXPR_ARG (exp, 1);
5206 dst = stabilize_va_list_loc (loc, dst, 1);
5207 src = stabilize_va_list_loc (loc, src, 0);
5209 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5211 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5213 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5214 TREE_SIDE_EFFECTS (t) = 1;
5215 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5217 else
5219 rtx dstb, srcb, size;
5221 /* Evaluate to pointers. */
5222 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5223 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5224 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5225 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5227 dstb = convert_memory_address (Pmode, dstb);
5228 srcb = convert_memory_address (Pmode, srcb);
5230 /* "Dereference" to BLKmode memories. */
5231 dstb = gen_rtx_MEM (BLKmode, dstb);
5232 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5233 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5234 srcb = gen_rtx_MEM (BLKmode, srcb);
5235 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5236 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5238 /* Copy. */
5239 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5242 return const0_rtx;
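/* Illustration (added in editing, not part of builtins.c): why the
   block-copy branch above exists.  On targets where va_list is a
   one-element array type (e.g. the x86_64 SysV ABI), va_copy has to
   copy the array contents; with a plain pointer va_list it is a single
   assignment:

     #include <stdarg.h>

     int
     first_arg (int n, ...)
     {
       va_list ap, aq;
       va_start (ap, n);
       va_copy (aq, ap);   // on x86_64 this copies a 24-byte record
       int v = va_arg (aq, int);
       va_end (ap);
       va_end (aq);
       return v;
     }
*/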
5245 /* Expand a call to one of the builtin functions __builtin_frame_address or
5246 __builtin_return_address. */
5248 static rtx
5249 expand_builtin_frame_address (tree fndecl, tree exp)
5251 /* The argument must be a nonnegative integer constant.
5252 It counts the number of frames to scan up the stack.
5253 The value is either the frame pointer value or the return
5254 address saved in that frame. */
5255 if (call_expr_nargs (exp) == 0)
5256 /* Warning about missing arg was already issued. */
5257 return const0_rtx;
5258 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5260 error ("invalid argument to %qD", fndecl);
5261 return const0_rtx;
5263 else
5265 /* Number of frames to scan up the stack. */
5266 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5268 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5270 /* Some ports cannot access arbitrary stack frames. */
5271 if (tem == NULL)
5273 warning (0, "unsupported argument to %qD", fndecl);
5274 return const0_rtx;
5277 if (count)
5279 /* Warn since no effort is made to ensure that any frame
5280 beyond the current one exists or can be safely reached. */
5281 warning (OPT_Wframe_address, "calling %qD with "
5282 "a nonzero argument is unsafe", fndecl);
5285 /* For __builtin_frame_address, return what we've got. */
5286 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5287 return tem;
5289 if (!REG_P (tem)
5290 && ! CONSTANT_P (tem))
5291 tem = copy_addr_to_reg (tem);
5292 return tem;
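/* Illustrative sketch, not part of the original source: the argument to
   these builtins must be an integer constant, e.g.

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (0);

   Any nonzero count triggers the -Wframe-address warning above, since no
   effort is made to verify that frames beyond the current one exist or
   can be safely reached.  */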
5296 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5297 failed and the caller should emit a normal call. */
5299 static rtx
5300 expand_builtin_alloca (tree exp)
5302 rtx op0;
5303 rtx result;
5304 unsigned int align;
5305 tree fndecl = get_callee_fndecl (exp);
5306 HOST_WIDE_INT max_size;
5307 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5308 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5309 bool valid_arglist
5310 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5311 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5312 VOID_TYPE)
5313 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5314 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5315 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5317 if (!valid_arglist)
5318 return NULL_RTX;
5320 if ((alloca_for_var
5321 && warn_vla_limit >= HOST_WIDE_INT_MAX
5322 && warn_alloc_size_limit < warn_vla_limit)
5323 || (!alloca_for_var
5324 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5325 && warn_alloc_size_limit < warn_alloca_limit
5328 /* -Walloca-larger-than and -Wvla-larger-than settings of
5329 less than HOST_WIDE_INT_MAX override the more general
5330 -Walloc-size-larger-than so unless either of the former
5331 options is smaller than the last one (which would imply

5332 that the call was already checked), check the alloca
5333 arguments for overflow. */
5334 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5335 int idx[] = { 0, -1 };
5336 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5339 /* Compute the argument. */
5340 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5342 /* Compute the alignment. */
5343 align = (fcode == BUILT_IN_ALLOCA
5344 ? BIGGEST_ALIGNMENT
5345 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5347 /* Compute the maximum size. */
5348 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5349 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5350 : -1);
5352 /* Allocate the desired space. If the allocation stems from the declaration
5353 of a variable-sized object, it cannot accumulate. */
5354 result
5355 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5356 result = convert_memory_address (ptr_mode, result);
5358 return result;
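/* Illustrative sketch, not part of the original source: a plain call such
   as

     char *buf = __builtin_alloca (n);

   reaches this expander as BUILT_IN_ALLOCA with BIGGEST_ALIGNMENT, while
   variable-sized object declarations are lowered to the _WITH_ALIGN
   variant with the required alignment as a constant second operand;
   CALL_ALLOCA_FOR_VAR_P is what selects the -Wvla-larger-than rather than
   the -Walloca-larger-than check above.  Exact lowering depends on flags
   and target.  */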
5361 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5362 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5363 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5364 handle_builtin_stack_restore function. */
5366 static rtx
5367 expand_asan_emit_allocas_unpoison (tree exp)
5369 tree arg0 = CALL_EXPR_ARG (exp, 0);
5370 tree arg1 = CALL_EXPR_ARG (exp, 1);
5371 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5372 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5373 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5374 stack_pointer_rtx, NULL_RTX, 0,
5375 OPTAB_LIB_WIDEN);
5376 off = convert_modes (ptr_mode, Pmode, off, 0);
5377 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5378 OPTAB_LIB_WIDEN);
5379 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5380 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5381 top, ptr_mode, bot, ptr_mode);
5382 return ret;
5385 /* Expand a call to bswap builtin in EXP.
5386 Return NULL_RTX if a normal call should be emitted rather than expanding the
5387 function in-line. If convenient, the result should be placed in TARGET.
5388 SUBTARGET may be used as the target for computing one of EXP's operands. */
5390 static rtx
5391 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5392 rtx subtarget)
5394 tree arg;
5395 rtx op0;
5397 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5398 return NULL_RTX;
5400 arg = CALL_EXPR_ARG (exp, 0);
5401 op0 = expand_expr (arg,
5402 subtarget && GET_MODE (subtarget) == target_mode
5403 ? subtarget : NULL_RTX,
5404 target_mode, EXPAND_NORMAL);
5405 if (GET_MODE (op0) != target_mode)
5406 op0 = convert_to_mode (target_mode, op0, 1);
5408 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5410 gcc_assert (target);
5412 return convert_to_mode (target_mode, target, 1);
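/* Illustrative sketch, not part of the original source: the expansion
   performs a byte reversal, e.g.

     __builtin_bswap32 (0x12345678) == 0x78563412

   with expand_unop falling back to a synthesized sequence or a libcall
   when the target provides no bswap pattern.  */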
5415 /* Expand a call to a unary builtin in EXP.
5416 Return NULL_RTX if a normal call should be emitted rather than expanding the
5417 function in-line. If convenient, the result should be placed in TARGET.
5418 SUBTARGET may be used as the target for computing one of EXP's operands. */
5420 static rtx
5421 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5422 rtx subtarget, optab op_optab)
5424 rtx op0;
5426 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5427 return NULL_RTX;
5429 /* Compute the argument. */
5430 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5431 (subtarget
5432 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5433 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5434 VOIDmode, EXPAND_NORMAL);
5435 /* Compute op, into TARGET if possible.
5436 Set TARGET to wherever the result comes back. */
5437 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5438 op_optab, op0, target, op_optab != clrsb_optab);
5439 gcc_assert (target);
5441 return convert_to_mode (target_mode, target, 0);
5444 /* Expand a call to __builtin_expect. We just return our argument
5445 as the builtin_expect semantics should have already been applied by
5446 the tree branch prediction pass. */
5448 static rtx
5449 expand_builtin_expect (tree exp, rtx target)
5451 tree arg;
5453 if (call_expr_nargs (exp) < 2)
5454 return const0_rtx;
5455 arg = CALL_EXPR_ARG (exp, 0);
5457 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5458 /* When guessing was done, the hints should be already stripped away. */
5459 gcc_assert (!flag_guess_branch_prob
5460 || optimize == 0 || seen_error ());
5461 return target;
5464 /* Expand a call to __builtin_expect_with_probability. We just return our
5465 argument as the builtin_expect semantics should have already been applied
5466 by the tree branch prediction pass. */
5468 static rtx
5469 expand_builtin_expect_with_probability (tree exp, rtx target)
5471 tree arg;
5473 if (call_expr_nargs (exp) < 3)
5474 return const0_rtx;
5475 arg = CALL_EXPR_ARG (exp, 0);
5477 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5478 /* When guessing was done, the hints should be already stripped away. */
5479 gcc_assert (!flag_guess_branch_prob
5480 || optimize == 0 || seen_error ());
5481 return target;
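/* Illustrative sketch, not part of the original source: by expansion time
   both forms are plain copies of their first argument; hints such as

     if (__builtin_expect (err != 0, 0))
       handle_error ();
     if (__builtin_expect_with_probability (x > 0, 1, 0.9))
       fast_path ();

   were already consumed by the branch prediction pass, which is exactly
   what the asserts above verify.  */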
5485 /* Expand a call to __builtin_assume_aligned. We just return our first
5486 argument as the builtin_assume_aligned semantics should have already
5487 been applied by CCP. */
5489 static rtx
5490 expand_builtin_assume_aligned (tree exp, rtx target)
5492 if (call_expr_nargs (exp) < 2)
5493 return const0_rtx;
5494 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5495 EXPAND_NORMAL);
5496 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5497 && (call_expr_nargs (exp) < 3
5498 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5499 return target;
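/* Illustrative sketch, not part of the original source: by this point CCP
   has already recorded the alignment on the SSA name, so

     p = __builtin_assume_aligned (p, 64);

   simply expands to its first argument; the assert above only checks that
   the discarded alignment operands have no side effects.  */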
5502 void
5503 expand_builtin_trap (void)
5505 if (targetm.have_trap ())
5507 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5508 /* For trap insns when not accumulating outgoing args force
5509 REG_ARGS_SIZE note to prevent crossjumping of calls with
5510 different args sizes. */
5511 if (!ACCUMULATE_OUTGOING_ARGS)
5512 add_args_size_note (insn, stack_pointer_delta);
5514 else
5516 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5517 tree call_expr = build_call_expr (fn, 0);
5518 expand_call (call_expr, NULL_RTX, false);
5521 emit_barrier ();
5524 /* Expand a call to __builtin_unreachable. We do nothing except emit
5525 a barrier saying that control flow will not pass here.
5527 It is the responsibility of the program being compiled to ensure
5528 that control flow never reaches __builtin_unreachable. */
5529 static void
5530 expand_builtin_unreachable (void)
5532 emit_barrier ();
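/* Illustrative sketch, not part of the original source: a typical use
   that reaches the expander above is

     switch (x & 3)
       case 0: return a;
       case 1: return b;
       case 2: return c;
       case 3: return d;
       default: __builtin_unreachable ();

   Only a barrier is emitted, letting the optimizers discard the default
   path entirely.  */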
5535 /* Expand EXP, a call to fabs, fabsf or fabsl.
5536 Return NULL_RTX if a normal call should be emitted rather than expanding
5537 the function inline. If convenient, the result should be placed
5538 in TARGET. SUBTARGET may be used as the target for computing
5539 the operand. */
5541 static rtx
5542 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5544 machine_mode mode;
5545 tree arg;
5546 rtx op0;
5548 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5549 return NULL_RTX;
5551 arg = CALL_EXPR_ARG (exp, 0);
5552 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5553 mode = TYPE_MODE (TREE_TYPE (arg));
5554 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5555 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5558 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5559 Return NULL if a normal call should be emitted rather than expanding the
5560 function inline. If convenient, the result should be placed in TARGET.
5561 SUBTARGET may be used as the target for computing the operand. */
5563 static rtx
5564 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5566 rtx op0, op1;
5567 tree arg;
5569 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5570 return NULL_RTX;
5572 arg = CALL_EXPR_ARG (exp, 0);
5573 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5575 arg = CALL_EXPR_ARG (exp, 1);
5576 op1 = expand_normal (arg);
5578 return expand_copysign (op0, op1, target);
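/* Illustrative sketch, not part of the original source: copysign returns
   the magnitude of its first operand with the sign of its second, e.g.

     copysign (3.0, -0.0) == -3.0

   and expand_copysign implements this with direct sign-bit manipulation
   where the floating point format allows it.  */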
5581 /* Expand a call to __builtin___clear_cache. */
5583 static rtx
5584 expand_builtin___clear_cache (tree exp)
5586 if (!targetm.code_for_clear_cache)
5588 #ifdef CLEAR_INSN_CACHE
5589 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5590 does something. Just do the default expansion to a call to
5591 __clear_cache(). */
5592 return NULL_RTX;
5593 #else
5594 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5595 does nothing. There is no need to call it. Do nothing. */
5596 return const0_rtx;
5597 #endif /* CLEAR_INSN_CACHE */
5600 /* We have a "clear_cache" insn, and it will handle everything. */
5601 tree begin, end;
5602 rtx begin_rtx, end_rtx;
5604 /* We must not expand to a library call. If we did, any
5605 fallback library function in libgcc that might contain a call to
5606 __builtin___clear_cache() would recurse infinitely. */
5607 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5609 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5610 return const0_rtx;
5613 if (targetm.have_clear_cache ())
5615 struct expand_operand ops[2];
5617 begin = CALL_EXPR_ARG (exp, 0);
5618 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5620 end = CALL_EXPR_ARG (exp, 1);
5621 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5623 create_address_operand (&ops[0], begin_rtx);
5624 create_address_operand (&ops[1], end_rtx);
5625 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5626 return const0_rtx;
5628 return const0_rtx;
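/* Illustrative sketch, not part of the original source: a JIT that writes
   instructions into a buffer must flush the range before executing it:

     memcpy (code, tmpl, len);
     __builtin___clear_cache (code, code + len);
     ((void (*) (void)) code) ();

   On targets with coherent instruction caches the call expands to
   nothing, per the const0_rtx branch above.  */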
5631 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5633 static rtx
5634 round_trampoline_addr (rtx tramp)
5636 rtx temp, addend, mask;
5638 /* If we don't need too much alignment, we'll have been guaranteed
5639 proper alignment by get_trampoline_type. */
5640 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5641 return tramp;
5643 /* Round address up to desired boundary. */
5644 temp = gen_reg_rtx (Pmode);
5645 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5646 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5648 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5649 temp, 0, OPTAB_LIB_WIDEN);
5650 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5651 temp, 0, OPTAB_LIB_WIDEN);
5653 return tramp;
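/* Worked example, not part of the original source: the PLUS/AND pair
   above is the usual round-up-to-alignment idiom

     rounded = (tramp + align - 1) & -align

   e.g. with align == 16, an address of 0x1003 becomes 0x1012 & -16,
   i.e. 0x1010.  */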
5656 static rtx
5657 expand_builtin_init_trampoline (tree exp, bool onstack)
5659 tree t_tramp, t_func, t_chain;
5660 rtx m_tramp, r_tramp, r_chain, tmp;
5662 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5663 POINTER_TYPE, VOID_TYPE))
5664 return NULL_RTX;
5666 t_tramp = CALL_EXPR_ARG (exp, 0);
5667 t_func = CALL_EXPR_ARG (exp, 1);
5668 t_chain = CALL_EXPR_ARG (exp, 2);
5670 r_tramp = expand_normal (t_tramp);
5671 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5672 MEM_NOTRAP_P (m_tramp) = 1;
5674 /* If ONSTACK, the TRAMP argument should be the address of a field
5675 within the local function's FRAME decl. Either way, let's see if
5676 we can fill in the MEM_ATTRs for this memory. */
5677 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5678 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5680 /* Creator of a heap trampoline is responsible for making sure the
5681 address is aligned to at least STACK_BOUNDARY. Normally malloc
5682 will ensure this anyhow. */
5683 tmp = round_trampoline_addr (r_tramp);
5684 if (tmp != r_tramp)
5686 m_tramp = change_address (m_tramp, BLKmode, tmp);
5687 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5688 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5691 /* The FUNC argument should be the address of the nested function.
5692 Extract the actual function decl to pass to the hook. */
5693 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5694 t_func = TREE_OPERAND (t_func, 0);
5695 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5697 r_chain = expand_normal (t_chain);
5699 /* Generate insns to initialize the trampoline. */
5700 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5702 if (onstack)
5704 trampolines_created = 1;
5706 if (targetm.calls.custom_function_descriptors != 0)
5707 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5708 "trampoline generated for nested function %qD", t_func);
5711 return const0_rtx;
5714 static rtx
5715 expand_builtin_adjust_trampoline (tree exp)
5717 rtx tramp;
5719 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5720 return NULL_RTX;
5722 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5723 tramp = round_trampoline_addr (tramp);
5724 if (targetm.calls.trampoline_adjust_address)
5725 tramp = targetm.calls.trampoline_adjust_address (tramp);
5727 return tramp;
5730 /* Expand a call to the builtin descriptor initialization routine.
5731 A descriptor is made up of a couple of pointers to the static
5732 chain and the code entry in this order. */
5734 static rtx
5735 expand_builtin_init_descriptor (tree exp)
5737 tree t_descr, t_func, t_chain;
5738 rtx m_descr, r_descr, r_func, r_chain;
5740 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5741 VOID_TYPE))
5742 return NULL_RTX;
5744 t_descr = CALL_EXPR_ARG (exp, 0);
5745 t_func = CALL_EXPR_ARG (exp, 1);
5746 t_chain = CALL_EXPR_ARG (exp, 2);
5748 r_descr = expand_normal (t_descr);
5749 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5750 MEM_NOTRAP_P (m_descr) = 1;
5752 r_func = expand_normal (t_func);
5753 r_chain = expand_normal (t_chain);
5755 /* Generate insns to initialize the descriptor. */
5756 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5757 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5758 POINTER_SIZE / BITS_PER_UNIT), r_func);
5760 return const0_rtx;
5763 /* Expand a call to the builtin descriptor adjustment routine. */
5765 static rtx
5766 expand_builtin_adjust_descriptor (tree exp)
5768 rtx tramp;
5770 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5771 return NULL_RTX;
5773 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5775 /* Unalign the descriptor to allow runtime identification. */
5776 tramp = plus_constant (ptr_mode, tramp,
5777 targetm.calls.custom_function_descriptors);
5779 return force_operand (tramp, NULL_RTX);
5782 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5783 function. The function first checks whether the back end provides
5784 an insn to implement signbit for the respective mode. If not, it
5785 checks whether the floating point format of the value is such that
5786 the sign bit can be extracted. If that is not the case, error out.
5787 EXP is the expression that is a call to the builtin function; if
5788 convenient, the result should be placed in TARGET. */
5789 static rtx
5790 expand_builtin_signbit (tree exp, rtx target)
5792 const struct real_format *fmt;
5793 scalar_float_mode fmode;
5794 scalar_int_mode rmode, imode;
5795 tree arg;
5796 int word, bitpos;
5797 enum insn_code icode;
5798 rtx temp;
5799 location_t loc = EXPR_LOCATION (exp);
5801 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5802 return NULL_RTX;
5804 arg = CALL_EXPR_ARG (exp, 0);
5805 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5806 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5807 fmt = REAL_MODE_FORMAT (fmode);
5809 arg = builtin_save_expr (arg);
5811 /* Expand the argument yielding a RTX expression. */
5812 temp = expand_normal (arg);
5814 /* Check if the back end provides an insn that handles signbit for the
5815 argument's mode. */
5816 icode = optab_handler (signbit_optab, fmode);
5817 if (icode != CODE_FOR_nothing)
5819 rtx_insn *last = get_last_insn ();
5820 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5821 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5822 return target;
5823 delete_insns_since (last);
5826 /* For floating point formats without a sign bit, implement signbit
5827 as "ARG < 0.0". */
5828 bitpos = fmt->signbit_ro;
5829 if (bitpos < 0)
5831 /* But we can't do this if the format supports signed zero. */
5832 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5834 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5835 build_real (TREE_TYPE (arg), dconst0));
5836 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5839 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5841 imode = int_mode_for_mode (fmode).require ();
5842 temp = gen_lowpart (imode, temp);
5844 else
5846 imode = word_mode;
5847 /* Handle targets with different FP word orders. */
5848 if (FLOAT_WORDS_BIG_ENDIAN)
5849 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5850 else
5851 word = bitpos / BITS_PER_WORD;
5852 temp = operand_subword_force (temp, word, fmode);
5853 bitpos = bitpos % BITS_PER_WORD;
5856 /* Force the intermediate word_mode (or narrower) result into a
5857 register. This avoids attempting to create paradoxical SUBREGs
5858 of floating point modes below. */
5859 temp = force_reg (imode, temp);
5861 /* If the bitpos is within the "result mode" lowpart, the operation
5862 can be implemented with a single bitwise AND. Otherwise, we need
5863 a right shift and an AND. */
5865 if (bitpos < GET_MODE_BITSIZE (rmode))
5867 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5869 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5870 temp = gen_lowpart (rmode, temp);
5871 temp = expand_binop (rmode, and_optab, temp,
5872 immed_wide_int_const (mask, rmode),
5873 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5875 else
5877 /* Perform a logical right shift to place the signbit in the least
5878 significant bit, then truncate the result to the desired mode
5879 and mask just this bit. */
5880 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5881 temp = gen_lowpart (rmode, temp);
5882 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5883 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5886 return temp;
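/* Illustrative sketch, not part of the original source: unlike a plain
   comparison against zero, signbit inspects the sign bit itself, so for
   IEEE formats

     signbit (-0.0) != 0   even though   -0.0 < 0.0   is false

   which is why the "ARG < 0.0" fallback above is restricted to formats
   without a sign bit where signed zeros are not honored.  */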
5889 /* Expand fork or exec calls. TARGET is the desired target of the
5890 call. EXP is the call. FN is the
5891 identifier of the actual function. IGNORE is nonzero if the
5892 value is to be ignored. */
5894 static rtx
5895 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5897 tree id, decl;
5898 tree call;
5900 /* If we are not profiling, just call the function. */
5901 if (!profile_arc_flag)
5902 return NULL_RTX;
5904 /* Otherwise call the wrapper. This should be equivalent for the rest of
5905 the compiler, so the code does not diverge, and the wrapper may run the
5906 code necessary for keeping the profiling sane. */
5908 switch (DECL_FUNCTION_CODE (fn))
5910 case BUILT_IN_FORK:
5911 id = get_identifier ("__gcov_fork");
5912 break;
5914 case BUILT_IN_EXECL:
5915 id = get_identifier ("__gcov_execl");
5916 break;
5918 case BUILT_IN_EXECV:
5919 id = get_identifier ("__gcov_execv");
5920 break;
5922 case BUILT_IN_EXECLP:
5923 id = get_identifier ("__gcov_execlp");
5924 break;
5926 case BUILT_IN_EXECLE:
5927 id = get_identifier ("__gcov_execle");
5928 break;
5930 case BUILT_IN_EXECVP:
5931 id = get_identifier ("__gcov_execvp");
5932 break;
5934 case BUILT_IN_EXECVE:
5935 id = get_identifier ("__gcov_execve");
5936 break;
5938 default:
5939 gcc_unreachable ();
5942 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5943 FUNCTION_DECL, id, TREE_TYPE (fn));
5944 DECL_EXTERNAL (decl) = 1;
5945 TREE_PUBLIC (decl) = 1;
5946 DECL_ARTIFICIAL (decl) = 1;
5947 TREE_NOTHROW (decl) = 1;
5948 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5949 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5950 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5951 return expand_call (call, target, ignore);
5956 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5957 the pointer in these functions is void*, the tree optimizers may remove
5958 casts. The mode computed in expand_builtin isn't reliable either, due
5959 to __sync_bool_compare_and_swap.
5961 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5962 group of builtins. This gives us log2 of the mode size. */
5964 static inline machine_mode
5965 get_builtin_sync_mode (int fcode_diff)
5967 /* The size is not negotiable, so ask not to get BLKmode in return
5968 if the target indicates that a smaller size would be better. */
5969 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
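/* Worked example, not part of the original source: for
   __sync_fetch_and_add_4, FCODE_DIFF is 2 (the _4 entry is two past the
   _1 entry), so the width is BITS_PER_UNIT << 2 == 32 bits, i.e. SImode
   on typical targets.  */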
5972 /* Expand the memory expression LOC and return the appropriate memory operand
5973 for the builtin_sync operations. */
5975 static rtx
5976 get_builtin_sync_mem (tree loc, machine_mode mode)
5978 rtx addr, mem;
5979 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5980 ? TREE_TYPE (TREE_TYPE (loc))
5981 : TREE_TYPE (loc));
5982 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5984 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5985 addr = convert_memory_address (addr_mode, addr);
5987 /* Note that we explicitly do not want any alias information for this
5988 memory, so that we kill all other live memories. Otherwise we don't
5989 satisfy the full barrier semantics of the intrinsic. */
5990 mem = gen_rtx_MEM (mode, addr);
5992 set_mem_addr_space (mem, addr_space);
5994 mem = validize_mem (mem);
5996 /* The alignment needs to be at least that of the mode. */
5997 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5998 get_pointer_alignment (loc)));
5999 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6000 MEM_VOLATILE_P (mem) = 1;
6002 return mem;
6005 /* Make sure an argument is in the right mode.
6006 EXP is the tree argument.
6007 MODE is the mode it should be in. */
6009 static rtx
6010 expand_expr_force_mode (tree exp, machine_mode mode)
6012 rtx val;
6013 machine_mode old_mode;
6015 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6016 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6017 of CONST_INTs, where we know the old_mode only from the call argument. */
6019 old_mode = GET_MODE (val);
6020 if (old_mode == VOIDmode)
6021 old_mode = TYPE_MODE (TREE_TYPE (exp));
6022 val = convert_modes (mode, old_mode, val, 1);
6023 return val;
6027 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6028 EXP is the CALL_EXPR. CODE is the rtx code
6029 that corresponds to the arithmetic or logical operation from the name;
6030 an exception here is that NOT actually means NAND. TARGET is an optional
6031 place for us to store the results; AFTER is true if this is the
6032 fetch_and_xxx form. */
6034 static rtx
6035 expand_builtin_sync_operation (machine_mode mode, tree exp,
6036 enum rtx_code code, bool after,
6037 rtx target)
6039 rtx val, mem;
6040 location_t loc = EXPR_LOCATION (exp);
6042 if (code == NOT && warn_sync_nand)
6044 tree fndecl = get_callee_fndecl (exp);
6045 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6047 static bool warned_f_a_n, warned_n_a_f;
6049 switch (fcode)
6051 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6052 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6053 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6054 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6055 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6056 if (warned_f_a_n)
6057 break;
6059 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6060 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6061 warned_f_a_n = true;
6062 break;
6064 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6065 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6066 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6067 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6068 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6069 if (warned_n_a_f)
6070 break;
6072 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6073 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6074 warned_n_a_f = true;
6075 break;
6077 default:
6078 gcc_unreachable ();
6082 /* Expand the operands. */
6083 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6084 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6086 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6087 after);
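/* Illustrative sketch, not part of the original source: the warning
   emitted above concerns the GCC 4.4 change in NAND semantics; since
   then

     __sync_fetch_and_nand (&x, v)

   atomically performs x = ~(x & v) and returns the old value, whereas
   earlier releases computed ~x & v.  */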
6090 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6091 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6092 true if this is the boolean form. TARGET is a place for us to store the
6093 results; this is NOT optional if IS_BOOL is true. */
6095 static rtx
6096 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6097 bool is_bool, rtx target)
6099 rtx old_val, new_val, mem;
6100 rtx *pbool, *poval;
6102 /* Expand the operands. */
6103 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6104 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6105 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6107 pbool = poval = NULL;
6108 if (target != const0_rtx)
6110 if (is_bool)
6111 pbool = &target;
6112 else
6113 poval = &target;
6115 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6116 false, MEMMODEL_SYNC_SEQ_CST,
6117 MEMMODEL_SYNC_SEQ_CST))
6118 return NULL_RTX;
6120 return target;
6123 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6124 general form is actually an atomic exchange, and some targets only
6125 support a reduced form with the second argument being a constant 1.
6126 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6127 the results. */
6129 static rtx
6130 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6131 rtx target)
6133 rtx val, mem;
6135 /* Expand the operands. */
6136 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6137 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6139 return expand_sync_lock_test_and_set (target, mem, val);
6142 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6144 static void
6145 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6147 rtx mem;
6149 /* Expand the operands. */
6150 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6152 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6155 /* Given an integer representing an ``enum memmodel'', verify its
6156 correctness and return the memory model enum. */
6158 static enum memmodel
6159 get_memmodel (tree exp)
6161 rtx op;
6162 unsigned HOST_WIDE_INT val;
6163 location_t loc
6164 = expansion_point_location_if_in_system_header (input_location);
6166 /* If the parameter is not a constant, it's a run time value so we'll just
6167 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6168 if (TREE_CODE (exp) != INTEGER_CST)
6169 return MEMMODEL_SEQ_CST;
6171 op = expand_normal (exp);
6173 val = INTVAL (op);
6174 if (targetm.memmodel_check)
6175 val = targetm.memmodel_check (val);
6176 else if (val & ~MEMMODEL_MASK)
6178 warning_at (loc, OPT_Winvalid_memory_model,
6179 "unknown architecture specifier in memory model to builtin");
6180 return MEMMODEL_SEQ_CST;
6183 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6184 if (memmodel_base (val) >= MEMMODEL_LAST)
6186 warning_at (loc, OPT_Winvalid_memory_model,
6187 "invalid memory model argument to builtin");
6188 return MEMMODEL_SEQ_CST;
6191 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6192 be conservative and promote consume to acquire. */
6193 if (val == MEMMODEL_CONSUME)
6194 val = MEMMODEL_ACQUIRE;
6196 return (enum memmodel) val;
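/* Illustrative sketch, not part of the original source: the model
   argument normally arrives as one of the __ATOMIC_* constants, e.g.

     __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   Non-constant or out-of-range arguments fall back to MEMMODEL_SEQ_CST,
   and consume is promoted to acquire, as the code above shows.  */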
6199 /* Expand the __atomic_exchange intrinsic:
6200 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6201 EXP is the CALL_EXPR.
6202 TARGET is an optional place for us to store the results. */
6204 static rtx
6205 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6207 rtx val, mem;
6208 enum memmodel model;
6210 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6212 if (!flag_inline_atomics)
6213 return NULL_RTX;
6215 /* Expand the operands. */
6216 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6217 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6219 return expand_atomic_exchange (target, mem, val, model);
6222 /* Expand the __atomic_compare_exchange intrinsic:
6223 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6224 TYPE desired, BOOL weak,
6225 enum memmodel success,
6226 enum memmodel failure)
6227 EXP is the CALL_EXPR.
6228 TARGET is an optional place for us to store the results. */
6230 static rtx
6231 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6232 rtx target)
6234 rtx expect, desired, mem, oldval;
6235 rtx_code_label *label;
6236 enum memmodel success, failure;
6237 tree weak;
6238 bool is_weak;
6239 location_t loc
6240 = expansion_point_location_if_in_system_header (input_location);
6242 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6243 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6245 if (failure > success)
6247 warning_at (loc, OPT_Winvalid_memory_model,
6248 "failure memory model cannot be stronger than success "
6249 "memory model for %<__atomic_compare_exchange%>");
6250 success = MEMMODEL_SEQ_CST;
6253 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6255 warning_at (loc, OPT_Winvalid_memory_model,
6256 "invalid failure memory model for "
6257 "%<__atomic_compare_exchange%>");
6258 failure = MEMMODEL_SEQ_CST;
6259 success = MEMMODEL_SEQ_CST;
6263 if (!flag_inline_atomics)
6264 return NULL_RTX;
6266 /* Expand the operands. */
6267 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6269 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6270 expect = convert_memory_address (Pmode, expect);
6271 expect = gen_rtx_MEM (mode, expect);
6272 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6274 weak = CALL_EXPR_ARG (exp, 3);
6275 is_weak = false;
6276 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6277 is_weak = true;
6279 if (target == const0_rtx)
6280 target = NULL;
6282 /* Lest the rtl backend create a race condition with an improper store
6283 to memory, always create a new pseudo for OLDVAL. */
6284 oldval = NULL;
6286 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6287 is_weak, success, failure))
6288 return NULL_RTX;
6290 /* Conditionally store back to EXPECT, lest we create a race condition
6291 with an improper store to memory. */
6292 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6293 the normal case where EXPECT is totally private, i.e. a register. At
6294 which point the store can be unconditional. */
6295 label = gen_label_rtx ();
6296 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6297 GET_MODE (target), 1, label);
6298 emit_move_insn (expect, oldval);
6299 emit_label (label);
6301 return target;
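/* Illustrative sketch, not part of the original source: the conditional
   store back to EXPECT supports the usual CAS-loop idiom

     int expected = __atomic_load_n (&x, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&x, &expected, expected + 1, 0,
					  __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
       ;

   where a failed exchange must leave the current value of the object in
   EXPECTED.  */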
6304 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6305 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6306 call. The weak parameter must be dropped to match the expected parameter
6307 list and the expected argument changed from value to pointer to memory
6308 slot. */
6310 static void
6311 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6313 unsigned int z;
6314 vec<tree, va_gc> *vec;
6316 vec_alloc (vec, 5);
6317 vec->quick_push (gimple_call_arg (call, 0));
6318 tree expected = gimple_call_arg (call, 1);
6319 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6320 TREE_TYPE (expected));
6321 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6322 if (expd != x)
6323 emit_move_insn (x, expd);
6324 tree v = make_tree (TREE_TYPE (expected), x);
6325 vec->quick_push (build1 (ADDR_EXPR,
6326 build_pointer_type (TREE_TYPE (expected)), v));
6327 vec->quick_push (gimple_call_arg (call, 2));
6328 /* Skip the boolean weak parameter. */
6329 for (z = 4; z < 6; z++)
6330 vec->quick_push (gimple_call_arg (call, z));
6331 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6332 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6333 gcc_assert (bytes_log2 < 5);
6334 built_in_function fncode
6335 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6336 + bytes_log2);
6337 tree fndecl = builtin_decl_explicit (fncode);
6338 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6339 fndecl);
6340 tree exp = build_call_vec (boolean_type_node, fn, vec);
6341 tree lhs = gimple_call_lhs (call);
6342 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6343 if (lhs)
6345 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6346 if (GET_MODE (boolret) != mode)
6347 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6348 x = force_reg (mode, x);
6349 write_complex_part (target, boolret, true);
6350 write_complex_part (target, x, false);
6354 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6356 void
6357 expand_ifn_atomic_compare_exchange (gcall *call)
6359 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6360 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6361 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6362 rtx expect, desired, mem, oldval, boolret;
6363 enum memmodel success, failure;
6364 tree lhs;
6365 bool is_weak;
6366 location_t loc
6367 = expansion_point_location_if_in_system_header (gimple_location (call));
6369 success = get_memmodel (gimple_call_arg (call, 4));
6370 failure = get_memmodel (gimple_call_arg (call, 5));
6372 if (failure > success)
6374 warning_at (loc, OPT_Winvalid_memory_model,
6375 "failure memory model cannot be stronger than success "
6376 "memory model for %<__atomic_compare_exchange%>");
6377 success = MEMMODEL_SEQ_CST;
6380 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6382 warning_at (loc, OPT_Winvalid_memory_model,
6383 "invalid failure memory model for "
6384 "%<__atomic_compare_exchange%>");
6385 failure = MEMMODEL_SEQ_CST;
6386 success = MEMMODEL_SEQ_CST;
6389 if (!flag_inline_atomics)
6391 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6392 return;
6395 /* Expand the operands. */
6396 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6398 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6399 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6401 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6403 boolret = NULL;
6404 oldval = NULL;
6406 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6407 is_weak, success, failure))
6409 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6410 return;
6413 lhs = gimple_call_lhs (call);
6414 if (lhs)
6416 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6417 if (GET_MODE (boolret) != mode)
6418 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6419 write_complex_part (target, boolret, true);
6420 write_complex_part (target, oldval, false);
6424 /* Expand the __atomic_load intrinsic:
6425 TYPE __atomic_load (TYPE *object, enum memmodel)
6426 EXP is the CALL_EXPR.
6427 TARGET is an optional place for us to store the results. */
6429 static rtx
6430 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6432 rtx mem;
6433 enum memmodel model;
6435 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6436 if (is_mm_release (model) || is_mm_acq_rel (model))
6438 location_t loc
6439 = expansion_point_location_if_in_system_header (input_location);
6440 warning_at (loc, OPT_Winvalid_memory_model,
6441 "invalid memory model for %<__atomic_load%>");
6442 model = MEMMODEL_SEQ_CST;
6445 if (!flag_inline_atomics)
6446 return NULL_RTX;
6448 /* Expand the operand. */
6449 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6451 return expand_atomic_load (target, mem, model);
6455 /* Expand the __atomic_store intrinsic:
6456 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6457 EXP is the CALL_EXPR.
6458 TARGET is an optional place for us to store the results. */
6460 static rtx
6461 expand_builtin_atomic_store (machine_mode mode, tree exp)
6463 rtx mem, val;
6464 enum memmodel model;
6466 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6467 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6468 || is_mm_release (model)))
6470 location_t loc
6471 = expansion_point_location_if_in_system_header (input_location);
6472 warning_at (loc, OPT_Winvalid_memory_model,
6473 "invalid memory model for %<__atomic_store%>");
6474 model = MEMMODEL_SEQ_CST;
6477 if (!flag_inline_atomics)
6478 return NULL_RTX;
6480 /* Expand the operands. */
6481 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6482 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6484 return expand_atomic_store (mem, val, model, false);
6487 /* Expand the __atomic_fetch_XXX intrinsic:
6488 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6489 EXP is the CALL_EXPR.
6490 TARGET is an optional place for us to store the results.
6491 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6492 FETCH_AFTER is true if returning the result of the operation.
6493 FETCH_AFTER is false if returning the value before the operation.
6494 IGNORE is true if the result is not used.
6495 EXT_CALL is the correct builtin for an external call if this cannot be
6496 resolved to an instruction sequence. */
6498 static rtx
6499 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6500 enum rtx_code code, bool fetch_after,
6501 bool ignore, enum built_in_function ext_call)
6503 rtx val, mem, ret;
6504 enum memmodel model;
6505 tree fndecl;
6506 tree addr;
6508 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6510 /* Expand the operands. */
6511 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6512 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6514 /* Only try generating instructions if inlining is turned on. */
6515 if (flag_inline_atomics)
6517 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6518 if (ret)
6519 return ret;
6522 /* Return if a different routine isn't needed for the library call. */
6523 if (ext_call == BUILT_IN_NONE)
6524 return NULL_RTX;
6526 /* Change the call to the specified function. */
6527 fndecl = get_callee_fndecl (exp);
6528 addr = CALL_EXPR_FN (exp);
6529 STRIP_NOPS (addr);
6531 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6532 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6534 /* If we will emit code after the call, the call cannot be a tail call.
6535 If it is emitted as a tail call, a barrier is emitted after it, and
6536 then all trailing code is removed. */
6537 if (!ignore)
6538 CALL_EXPR_TAILCALL (exp) = 0;
6540 /* Expand the call here so we can emit trailing code. */
6541 ret = expand_call (exp, target, ignore);
6543 /* Replace the original function just in case it matters. */
6544 TREE_OPERAND (addr, 0) = fndecl;
6546 /* Then issue the arithmetic correction to return the right result. */
6547 if (!ignore)
6549 if (code == NOT)
6551 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6552 OPTAB_LIB_WIDEN);
6553 ret = expand_simple_unop (mode, NOT, ret, target, true);
6555 else
6556 ret = expand_simple_binop (mode, code, ret, val, target, true,
6557 OPTAB_LIB_WIDEN);
6559 return ret;
6562 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6564 void
6565 expand_ifn_atomic_bit_test_and (gcall *call)
6567 tree ptr = gimple_call_arg (call, 0);
6568 tree bit = gimple_call_arg (call, 1);
6569 tree flag = gimple_call_arg (call, 2);
6570 tree lhs = gimple_call_lhs (call);
6571 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6572 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6573 enum rtx_code code;
6574 optab optab;
6575 struct expand_operand ops[5];
6577 gcc_assert (flag_inline_atomics);
6579 if (gimple_call_num_args (call) == 4)
6580 model = get_memmodel (gimple_call_arg (call, 3));
6582 rtx mem = get_builtin_sync_mem (ptr, mode);
6583 rtx val = expand_expr_force_mode (bit, mode);
6585 switch (gimple_call_internal_fn (call))
6587 case IFN_ATOMIC_BIT_TEST_AND_SET:
6588 code = IOR;
6589 optab = atomic_bit_test_and_set_optab;
6590 break;
6591 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6592 code = XOR;
6593 optab = atomic_bit_test_and_complement_optab;
6594 break;
6595 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6596 code = AND;
6597 optab = atomic_bit_test_and_reset_optab;
6598 break;
6599 default:
6600 gcc_unreachable ();
6603 if (lhs == NULL_TREE)
6605 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6606 val, NULL_RTX, true, OPTAB_DIRECT);
6607 if (code == AND)
6608 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6609 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6610 return;
6613 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6614 enum insn_code icode = direct_optab_handler (optab, mode);
6615 gcc_assert (icode != CODE_FOR_nothing);
6616 create_output_operand (&ops[0], target, mode);
6617 create_fixed_operand (&ops[1], mem);
6618 create_convert_operand_to (&ops[2], val, mode, true);
6619 create_integer_operand (&ops[3], model);
6620 create_integer_operand (&ops[4], integer_onep (flag));
6621 if (maybe_expand_insn (icode, 5, ops))
6622 return;
6624 rtx bitval = val;
6625 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6626 val, NULL_RTX, true, OPTAB_DIRECT);
6627 rtx maskval = val;
6628 if (code == AND)
6629 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6630 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6631 code, model, false);
6632 if (integer_onep (flag))
6634 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6635 NULL_RTX, true, OPTAB_DIRECT);
6636 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6637 true, OPTAB_DIRECT);
6639 else
6640 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6641 OPTAB_DIRECT);
6642 if (result != target)
6643 emit_move_insn (target, result);
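/* Illustrative sketch, not part of the original source: the internal
   function expanded above is matched from source patterns such as

     bool was_set = __atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
		    & (1u << bit);

   which, on targets providing atomic_bit_test_and_set_optab, becomes a
   single bit-test-and-set instruction instead of the fetch_op fallback
   at the end of the function.  */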
6646 /* Expand an atomic clear operation.
6647 void _atomic_clear (BOOL *obj, enum memmodel)
6648 EXP is the call expression. */
6650 static rtx
6651 expand_builtin_atomic_clear (tree exp)
6653 machine_mode mode;
6654 rtx mem, ret;
6655 enum memmodel model;
6657 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6658 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6659 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6661 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6663 location_t loc
6664 = expansion_point_location_if_in_system_header (input_location);
6665 warning_at (loc, OPT_Winvalid_memory_model,
6666 "invalid memory model for %<__atomic_store%>");
6667 model = MEMMODEL_SEQ_CST;
6670 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6671 Failing that, a store is issued by __atomic_store. The only way this can
6672 fail is if the bool type is larger than a word size. Unlikely, but
6673 handle it anyway for completeness. Assume a single threaded model since
6674 there is no atomic support in this case, and no barriers are required. */
6675 ret = expand_atomic_store (mem, const0_rtx, model, true);
6676 if (!ret)
6677 emit_move_insn (mem, const0_rtx);
6678 return const0_rtx;
6681 /* Expand an atomic test_and_set operation.
6682 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6683 EXP is the call expression. */
6685 static rtx
6686 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6688 rtx mem;
6689 enum memmodel model;
6690 machine_mode mode;
6692 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6693 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6694 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6696 return expand_atomic_test_and_set (target, mem, model);
6700 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6701 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6703 static tree
6704 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6706 int size;
6707 machine_mode mode;
6708 unsigned int mode_align, type_align;
6710 if (TREE_CODE (arg0) != INTEGER_CST)
6711 return NULL_TREE;
6713 /* We need a corresponding integer mode for the access to be lock-free. */
6714 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6715 if (!int_mode_for_size (size, 0).exists (&mode))
6716 return boolean_false_node;
6718 mode_align = GET_MODE_ALIGNMENT (mode);
6720 if (TREE_CODE (arg1) == INTEGER_CST)
6722 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6724 /* Either this argument is null, or it's a fake pointer encoding
6725 the alignment of the object. */
6726 val = least_bit_hwi (val);
6727 val *= BITS_PER_UNIT;
6729 if (val == 0 || mode_align < val)
6730 type_align = mode_align;
6731 else
6732 type_align = val;
6734 else
6736 tree ttype = TREE_TYPE (arg1);
6738 /* This function is usually invoked and folded immediately by the front
6739 end before anything else has a chance to look at it. The pointer
6740 parameter at this point is usually cast to a void *, so check for that
6741 and look past the cast. */
6742 if (CONVERT_EXPR_P (arg1)
6743 && POINTER_TYPE_P (ttype)
6744 && VOID_TYPE_P (TREE_TYPE (ttype))
6745 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6746 arg1 = TREE_OPERAND (arg1, 0);
6748 ttype = TREE_TYPE (arg1);
6749 gcc_assert (POINTER_TYPE_P (ttype));
6751 /* Get the underlying type of the object. */
6752 ttype = TREE_TYPE (ttype);
6753 type_align = TYPE_ALIGN (ttype);
6756 /* If the object has smaller alignment, the lock free routines cannot
6757 be used. */
6758 if (type_align < mode_align)
6759 return boolean_false_node;
6761 /* Check if a compare_and_swap pattern exists for the mode which represents
6762 the required size. The pattern is not allowed to fail, so the existence
6763 of the pattern indicates support is present. Also require that an
6764 atomic load exists for the required size. */
6765 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6766 return boolean_true_node;
6767 else
6768 return boolean_false_node;
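/* Illustrative sketch, not part of the original source:

     __atomic_always_lock_free (sizeof (int), 0)

   folds to a compile-time constant: true when an integer mode of that
   size exists, the object is at least as aligned as the mode requires
   (with a null pointer, the mode's own alignment is assumed), and the
   target has both a never-failing compare-and-swap and an atomic load
   for it.  */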
6771 /* Return true if the parameters to call EXP represent an object which will
6772 always generate lock free instructions. The first argument represents the
6773 size of the object, and the second parameter is a pointer to the object
6774 itself. If NULL is passed for the object, then the result is based on
6775 typical alignment for an object of the specified size. Otherwise return
6776 false. */
6778 static rtx
6779 expand_builtin_atomic_always_lock_free (tree exp)
6781 tree size;
6782 tree arg0 = CALL_EXPR_ARG (exp, 0);
6783 tree arg1 = CALL_EXPR_ARG (exp, 1);
6785 if (TREE_CODE (arg0) != INTEGER_CST)
6787 error ("non-constant argument 1 to __atomic_always_lock_free");
6788 return const0_rtx;
6791 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6792 if (size == boolean_true_node)
6793 return const1_rtx;
6794 return const0_rtx;
6797 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6798 is lock free on this architecture. */
6800 static tree
6801 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6803 if (!flag_inline_atomics)
6804 return NULL_TREE;
6806 /* If it isn't always lock free, don't generate a result. */
6807 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6808 return boolean_true_node;
6810 return NULL_TREE;
6813 /* Return true if the parameters to call EXP represent an object which will
6814 always generate lock free instructions. The first argument represents the
6815 size of the object, and the second parameter is a pointer to the object
6816 itself. If NULL is passed for the object, then the result is based on
6817 typical alignment for an object of the specified size. Otherwise return
6818 NULL. */
6820 static rtx
6821 expand_builtin_atomic_is_lock_free (tree exp)
6823 tree size;
6824 tree arg0 = CALL_EXPR_ARG (exp, 0);
6825 tree arg1 = CALL_EXPR_ARG (exp, 1);
6827 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6829 error ("non-integer argument 1 to __atomic_is_lock_free");
6830 return NULL_RTX;
6833 if (!flag_inline_atomics)
6834 return NULL_RTX;
6836 /* If the value is known at compile time, return the RTX for it. */
6837 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6838 if (size == boolean_true_node)
6839 return const1_rtx;
6841 return NULL_RTX;
6844 /* Expand the __atomic_thread_fence intrinsic:
6845 void __atomic_thread_fence (enum memmodel)
6846 EXP is the CALL_EXPR. */
6848 static void
6849 expand_builtin_atomic_thread_fence (tree exp)
6851 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6852 expand_mem_thread_fence (model);
6855 /* Expand the __atomic_signal_fence intrinsic:
6856 void __atomic_signal_fence (enum memmodel)
6857 EXP is the CALL_EXPR. */
6859 static void
6860 expand_builtin_atomic_signal_fence (tree exp)
6862 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6863 expand_mem_signal_fence (model);
6866 /* Expand the __sync_synchronize intrinsic. */
6868 static void
6869 expand_builtin_sync_synchronize (void)
6871 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6874 static rtx
6875 expand_builtin_thread_pointer (tree exp, rtx target)
6877 enum insn_code icode;
6878 if (!validate_arglist (exp, VOID_TYPE))
6879 return const0_rtx;
6880 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6881 if (icode != CODE_FOR_nothing)
6883 struct expand_operand op;
6884 /* If the target is not suitable then create a new target. */
6885 if (target == NULL_RTX
6886 || !REG_P (target)
6887 || GET_MODE (target) != Pmode)
6888 target = gen_reg_rtx (Pmode);
6889 create_output_operand (&op, target, Pmode);
6890 expand_insn (icode, 1, &op);
6891 return target;
6893 error ("%<__builtin_thread_pointer%> is not supported on this target");
6894 return const0_rtx;
6897 static void
6898 expand_builtin_set_thread_pointer (tree exp)
6900 enum insn_code icode;
6901 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6902 return;
6903 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6904 if (icode != CODE_FOR_nothing)
6906 struct expand_operand op;
6907 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6908 Pmode, EXPAND_NORMAL);
6909 create_input_operand (&op, val, Pmode);
6910 expand_insn (icode, 1, &op);
6911 return;
6913 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6917 /* Emit code to restore the current value of stack. */
6919 static void
6920 expand_stack_restore (tree var)
6922 rtx_insn *prev;
6923 rtx sa = expand_normal (var);
6925 sa = convert_memory_address (Pmode, sa);
6927 prev = get_last_insn ();
6928 emit_stack_restore (SAVE_BLOCK, sa);
6930 record_new_stack_level ();
6932 fixup_args_size_notes (prev, get_last_insn (), 0);
6935 /* Emit code to save the current value of stack. */
6937 static rtx
6938 expand_stack_save (void)
6940 rtx ret = NULL_RTX;
6942 emit_stack_save (SAVE_BLOCK, &ret);
6943 return ret;
6946 /* Emit code to get the openacc gang, worker or vector id or size. */
6948 static rtx
6949 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6951 const char *name;
6952 rtx fallback_retval;
6953 rtx_insn *(*gen_fn) (rtx, rtx);
6954 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6956 case BUILT_IN_GOACC_PARLEVEL_ID:
6957 name = "__builtin_goacc_parlevel_id";
6958 fallback_retval = const0_rtx;
6959 gen_fn = targetm.gen_oacc_dim_pos;
6960 break;
6961 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6962 name = "__builtin_goacc_parlevel_size";
6963 fallback_retval = const1_rtx;
6964 gen_fn = targetm.gen_oacc_dim_size;
6965 break;
6966 default:
6967 gcc_unreachable ();
6970 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6972 error ("%qs only supported in OpenACC code", name);
6973 return const0_rtx;
6976 tree arg = CALL_EXPR_ARG (exp, 0);
6977 if (TREE_CODE (arg) != INTEGER_CST)
6979 error ("non-constant argument 0 to %qs", name);
6980 return const0_rtx;
6983 int dim = TREE_INT_CST_LOW (arg);
6984 switch (dim)
6986 case GOMP_DIM_GANG:
6987 case GOMP_DIM_WORKER:
6988 case GOMP_DIM_VECTOR:
6989 break;
6990 default:
6991 error ("illegal argument 0 to %qs", name);
6992 return const0_rtx;
6995 if (ignore)
6996 return target;
6998 if (target == NULL_RTX)
6999 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7001 if (!targetm.have_oacc_dim_size ())
7003 emit_move_insn (target, fallback_retval);
7004 return target;
7007 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7008 emit_insn (gen_fn (reg, GEN_INT (dim)));
7009 if (reg != target)
7010 emit_move_insn (target, reg);
7012 return target;
7015 /* Expand a string compare operation using a sequence of char comparisons
7016 to get rid of the calling overhead, with result going to TARGET if
7017 that's convenient.
7019 VAR_STR is the variable string source;
7020 CONST_STR is the constant string source;
7021 LENGTH is the number of chars to compare;
7022 CONST_STR_N indicates which source string is the constant string;
7023 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7025 The call expands to (assume const_str_n is 2, i.e., arg2 is a constant string):
7027 target = (int) (unsigned char) var_str[0]
7028 - (int) (unsigned char) const_str[0];
7029 if (target != 0)
7030 goto ne_label;
7032 target = (int) (unsigned char) var_str[length - 2]
7033 - (int) (unsigned char) const_str[length - 2];
7034 if (target != 0)
7035 goto ne_label;
7036 target = (int) (unsigned char) var_str[length - 1]
7037 - (int) (unsigned char) const_str[length - 1];
7038 ne_label:
7041 static rtx
7042 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7043 unsigned HOST_WIDE_INT length,
7044 int const_str_n, machine_mode mode)
7046 HOST_WIDE_INT offset = 0;
7047 rtx var_rtx_array
7048 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7049 rtx var_rtx = NULL_RTX;
7050 rtx const_rtx = NULL_RTX;
7051 rtx result = target ? target : gen_reg_rtx (mode);
7052 rtx_code_label *ne_label = gen_label_rtx ();
7053 tree unit_type_node = unsigned_char_type_node;
7054 scalar_int_mode unit_mode
7055 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7057 start_sequence ();
7059 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7061 var_rtx
7062 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7063 const_rtx = c_readstr (const_str + offset, unit_mode);
7064 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7065 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7067 op0 = convert_modes (mode, unit_mode, op0, 1);
7068 op1 = convert_modes (mode, unit_mode, op1, 1);
7069 result = expand_simple_binop (mode, MINUS, op0, op1,
7070 result, 1, OPTAB_WIDEN);
7071 if (i < length - 1)
7072 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7073 mode, true, ne_label);
7074 offset += GET_MODE_SIZE (unit_mode);
7077 emit_label (ne_label);
7078 rtx_insn *insns = get_insns ();
7079 end_sequence ();
7080 emit_insn (insns);
7082 return result;
7085 /* Inline expansion of a call to str(n)cmp or memcmp, with result going
7086 to TARGET if that's convenient.
7087 If the call is not inlined, return NULL_RTX. */
7088 static rtx
7089 inline_expand_builtin_string_cmp (tree exp, rtx target)
7091 tree fndecl = get_callee_fndecl (exp);
7092 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7093 unsigned HOST_WIDE_INT length = 0;
7094 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7096 /* Do NOT apply this inlining expansion when optimizing for size or
7097 when the optimization level is below 2. */
7098 if (optimize < 2 || optimize_insn_for_size_p ())
7099 return NULL_RTX;
7101 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7102 || fcode == BUILT_IN_STRNCMP
7103 || fcode == BUILT_IN_MEMCMP);
7105 /* On a target where the type of the call (int) has the same or narrower
7106 precision than unsigned char, give up the inlining expansion. */
7107 if (TYPE_PRECISION (unsigned_char_type_node)
7108 >= TYPE_PRECISION (TREE_TYPE (exp)))
7109 return NULL_RTX;
7111 tree arg1 = CALL_EXPR_ARG (exp, 0);
7112 tree arg2 = CALL_EXPR_ARG (exp, 1);
7113 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7115 unsigned HOST_WIDE_INT len1 = 0;
7116 unsigned HOST_WIDE_INT len2 = 0;
7117 unsigned HOST_WIDE_INT len3 = 0;
7119 const char *src_str1 = c_getstr (arg1, &len1);
7120 const char *src_str2 = c_getstr (arg2, &len2);
7122 /* If neither string is a constant string, the call does not qualify. */
7123 if (!src_str1 && !src_str2)
7124 return NULL_RTX;
7126 /* For strncmp, if the length is not a constant, the call does not qualify. */
7127 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7128 return NULL_RTX;
7130 int const_str_n = 0;
7131 if (!len1)
7132 const_str_n = 2;
7133 else if (!len2)
7134 const_str_n = 1;
7135 else if (len2 > len1)
7136 const_str_n = 1;
7137 else
7138 const_str_n = 2;
7140 gcc_checking_assert (const_str_n > 0);
7141 length = (const_str_n == 1) ? len1 : len2;
7143 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7144 length = len3;
7146 /* If the length of the comparison is larger than the threshold,
7147 do nothing. */
7148 if (length > (unsigned HOST_WIDE_INT)
7149 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7150 return NULL_RTX;
7152 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7154 /* Now, expand the call inline. */
7155 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7156 (const_str_n == 1) ? src_str1 : src_str2, length,
7157 const_str_n, mode);
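/* For illustration (a hypothetical example, not part of GCC): in

     int f (const char *s) { return strcmp (s, "hi"); }

   arg2 is the constant string, so inline_string_cmp emits byte-wise
   subtractions of s[i] and "hi"[i] (through the terminating NUL),
   jumping to ne_label on the first nonzero difference.  The expansion
   is skipped at -O1 and below, when optimizing for size, or when the
   comparison length exceeds --param builtin-string-cmp-inline-length.  */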
7160 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7161 represents the size of the first argument to that call, or VOIDmode
7162 if the argument is a pointer. IGNORE will be true if the result
7163 isn't used. */
7164 static rtx
7165 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7166 bool ignore)
7168 rtx val, failsafe;
7169 unsigned nargs = call_expr_nargs (exp);
7171 tree arg0 = CALL_EXPR_ARG (exp, 0);
7173 if (mode == VOIDmode)
7175 mode = TYPE_MODE (TREE_TYPE (arg0));
7176 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7179 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7181 /* An optional second argument can be used as a failsafe value on
7182 some machines. If it isn't present, then the failsafe value is
7183 assumed to be 0. */
7184 if (nargs > 1)
7186 tree arg1 = CALL_EXPR_ARG (exp, 1);
7187 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7189 else
7190 failsafe = const0_rtx;
7192 /* If the result isn't used, the behavior is undefined. It would be
7193 nice to emit a warning here, but path splitting means this might
7194 happen with legitimate code. So simply drop the builtin
7195 expansion in that case; we've handled any side-effects above. */
7196 if (ignore)
7197 return const0_rtx;
7199 /* If we don't have a suitable target, create one to hold the result. */
7200 if (target == NULL || GET_MODE (target) != mode)
7201 target = gen_reg_rtx (mode);
7203 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7204 val = convert_modes (mode, VOIDmode, val, false);
7206 return targetm.speculation_safe_value (mode, target, val, failsafe);
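/* For illustration (hypothetical user code, not part of GCC): the
   intended use of this builtin is to sanitize a value derived from a
   bounds check against Spectre-v1 style misspeculation:

     int
     load_checked (int *array, unsigned int idx, unsigned int bound)
     {
       if (idx < bound)
         return array[__builtin_speculation_safe_value (idx)];
       return 0;
     }

   On targets providing the speculation_safe_value hook, IDX is forced
   to the failsafe value (0 unless a second argument is given) along
   misspeculated paths.  */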
7209 /* Expand an expression EXP that calls a built-in function,
7210 with result going to TARGET if that's convenient
7211 (and in mode MODE if that's convenient).
7212 SUBTARGET may be used as the target for computing one of EXP's operands.
7213 IGNORE is nonzero if the value is to be ignored. */
7215 rtx
7216 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7217 int ignore)
7219 tree fndecl = get_callee_fndecl (exp);
7220 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7221 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7222 int flags;
7224 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7225 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7227 /* When ASan is enabled, we don't want to expand some memory/string
7228 builtins and rely on libsanitizer's hooks. This allows us to avoid
7229 redundant checks and be sure that possible overflow will be detected
7230 by ASan. */
7232 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7233 return expand_call (exp, target, ignore);
7235 /* When not optimizing, generate calls to library functions for a certain
7236 set of builtins. */
7237 if (!optimize
7238 && !called_as_built_in (fndecl)
7239 && fcode != BUILT_IN_FORK
7240 && fcode != BUILT_IN_EXECL
7241 && fcode != BUILT_IN_EXECV
7242 && fcode != BUILT_IN_EXECLP
7243 && fcode != BUILT_IN_EXECLE
7244 && fcode != BUILT_IN_EXECVP
7245 && fcode != BUILT_IN_EXECVE
7246 && !ALLOCA_FUNCTION_CODE_P (fcode)
7247 && fcode != BUILT_IN_FREE)
7248 return expand_call (exp, target, ignore);
7250 /* The built-in function expanders test for target == const0_rtx
7251 to determine whether the function's result will be ignored. */
7252 if (ignore)
7253 target = const0_rtx;
7255 /* If the result of a pure or const built-in function is ignored, and
7256 none of its arguments are volatile, we can avoid expanding the
7257 built-in call and just evaluate the arguments for side-effects. */
7258 if (target == const0_rtx
7259 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7260 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7262 bool volatilep = false;
7263 tree arg;
7264 call_expr_arg_iterator iter;
7266 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7267 if (TREE_THIS_VOLATILE (arg))
7269 volatilep = true;
7270 break;
7273 if (! volatilep)
7275 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7276 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7277 return const0_rtx;
7281 switch (fcode)
7283 CASE_FLT_FN (BUILT_IN_FABS):
7284 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7285 case BUILT_IN_FABSD32:
7286 case BUILT_IN_FABSD64:
7287 case BUILT_IN_FABSD128:
7288 target = expand_builtin_fabs (exp, target, subtarget);
7289 if (target)
7290 return target;
7291 break;
7293 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7294 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7295 target = expand_builtin_copysign (exp, target, subtarget);
7296 if (target)
7297 return target;
7298 break;
7300 /* Just do a normal library call if we were unable to fold
7301 the values. */
7302 CASE_FLT_FN (BUILT_IN_CABS):
7303 break;
7305 CASE_FLT_FN (BUILT_IN_FMA):
7306 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7307 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7308 if (target)
7309 return target;
7310 break;
7312 CASE_FLT_FN (BUILT_IN_ILOGB):
7313 if (! flag_unsafe_math_optimizations)
7314 break;
7315 gcc_fallthrough ();
7316 CASE_FLT_FN (BUILT_IN_ISINF):
7317 CASE_FLT_FN (BUILT_IN_FINITE):
7318 case BUILT_IN_ISFINITE:
7319 case BUILT_IN_ISNORMAL:
7320 target = expand_builtin_interclass_mathfn (exp, target);
7321 if (target)
7322 return target;
7323 break;
7325 CASE_FLT_FN (BUILT_IN_ICEIL):
7326 CASE_FLT_FN (BUILT_IN_LCEIL):
7327 CASE_FLT_FN (BUILT_IN_LLCEIL):
7328 CASE_FLT_FN (BUILT_IN_LFLOOR):
7329 CASE_FLT_FN (BUILT_IN_IFLOOR):
7330 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7331 target = expand_builtin_int_roundingfn (exp, target);
7332 if (target)
7333 return target;
7334 break;
7336 CASE_FLT_FN (BUILT_IN_IRINT):
7337 CASE_FLT_FN (BUILT_IN_LRINT):
7338 CASE_FLT_FN (BUILT_IN_LLRINT):
7339 CASE_FLT_FN (BUILT_IN_IROUND):
7340 CASE_FLT_FN (BUILT_IN_LROUND):
7341 CASE_FLT_FN (BUILT_IN_LLROUND):
7342 target = expand_builtin_int_roundingfn_2 (exp, target);
7343 if (target)
7344 return target;
7345 break;
7347 CASE_FLT_FN (BUILT_IN_POWI):
7348 target = expand_builtin_powi (exp, target);
7349 if (target)
7350 return target;
7351 break;
7353 CASE_FLT_FN (BUILT_IN_CEXPI):
7354 target = expand_builtin_cexpi (exp, target);
7355 gcc_assert (target);
7356 return target;
7358 CASE_FLT_FN (BUILT_IN_SIN):
7359 CASE_FLT_FN (BUILT_IN_COS):
7360 if (! flag_unsafe_math_optimizations)
7361 break;
7362 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7363 if (target)
7364 return target;
7365 break;
7367 CASE_FLT_FN (BUILT_IN_SINCOS):
7368 if (! flag_unsafe_math_optimizations)
7369 break;
7370 target = expand_builtin_sincos (exp);
7371 if (target)
7372 return target;
7373 break;
7375 case BUILT_IN_APPLY_ARGS:
7376 return expand_builtin_apply_args ();
7378 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7379 FUNCTION with a copy of the parameters described by
7380 ARGUMENTS, and ARGSIZE. It returns a block of memory
7381 allocated on the stack into which is stored all the registers
7382 that might possibly be used for returning the result of a
7383 function. ARGUMENTS is the value returned by
7384 __builtin_apply_args. ARGSIZE is the number of bytes of
7385 arguments that must be copied. ??? How should this value be
7386 computed? We'll also need a safe worst case value for varargs
7387 functions. */
7388 case BUILT_IN_APPLY:
7389 if (!validate_arglist (exp, POINTER_TYPE,
7390 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7391 && !validate_arglist (exp, REFERENCE_TYPE,
7392 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7393 return const0_rtx;
7394 else
7396 rtx ops[3];
7398 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7399 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7400 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7402 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7405 /* __builtin_return (RESULT) causes the function to return the
7406 value described by RESULT. RESULT is address of the block of
7407 memory returned by __builtin_apply. */
7408 case BUILT_IN_RETURN:
7409 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7410 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7411 return const0_rtx;
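/* For illustration (hypothetical user code, not part of GCC): taken
   together, these builtins support the classic call-forwarding idiom

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) other_fn, args, 64);
     __builtin_return (result);

   where OTHER_FN is some hypothetical function with a compatible
   return type and 64 is a caller-chosen upper bound on the number of
   bytes of arguments to copy.  */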
7413 case BUILT_IN_SAVEREGS:
7414 return expand_builtin_saveregs ();
7416 case BUILT_IN_VA_ARG_PACK:
7417 /* All valid uses of __builtin_va_arg_pack () are removed during
7418 inlining. */
7419 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7420 return const0_rtx;
7422 case BUILT_IN_VA_ARG_PACK_LEN:
7423 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7424 inlining. */
7425 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7426 return const0_rtx;
7428 /* Return the address of the first anonymous stack arg. */
7429 case BUILT_IN_NEXT_ARG:
7430 if (fold_builtin_next_arg (exp, false))
7431 return const0_rtx;
7432 return expand_builtin_next_arg ();
7434 case BUILT_IN_CLEAR_CACHE:
7435 target = expand_builtin___clear_cache (exp);
7436 if (target)
7437 return target;
7438 break;
7440 case BUILT_IN_CLASSIFY_TYPE:
7441 return expand_builtin_classify_type (exp);
7443 case BUILT_IN_CONSTANT_P:
7444 return const0_rtx;
7446 case BUILT_IN_FRAME_ADDRESS:
7447 case BUILT_IN_RETURN_ADDRESS:
7448 return expand_builtin_frame_address (fndecl, exp);
7450 /* Returns the address of the area where the structure is returned,
7451 or 0 otherwise. */
7452 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7453 if (call_expr_nargs (exp) != 0
7454 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7455 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7456 return const0_rtx;
7457 else
7458 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7460 CASE_BUILT_IN_ALLOCA:
7461 target = expand_builtin_alloca (exp);
7462 if (target)
7463 return target;
7464 break;
7466 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7467 return expand_asan_emit_allocas_unpoison (exp);
7469 case BUILT_IN_STACK_SAVE:
7470 return expand_stack_save ();
7472 case BUILT_IN_STACK_RESTORE:
7473 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7474 return const0_rtx;
7476 case BUILT_IN_BSWAP16:
7477 case BUILT_IN_BSWAP32:
7478 case BUILT_IN_BSWAP64:
7479 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7480 if (target)
7481 return target;
7482 break;
7484 CASE_INT_FN (BUILT_IN_FFS):
7485 target = expand_builtin_unop (target_mode, exp, target,
7486 subtarget, ffs_optab);
7487 if (target)
7488 return target;
7489 break;
7491 CASE_INT_FN (BUILT_IN_CLZ):
7492 target = expand_builtin_unop (target_mode, exp, target,
7493 subtarget, clz_optab);
7494 if (target)
7495 return target;
7496 break;
7498 CASE_INT_FN (BUILT_IN_CTZ):
7499 target = expand_builtin_unop (target_mode, exp, target,
7500 subtarget, ctz_optab);
7501 if (target)
7502 return target;
7503 break;
7505 CASE_INT_FN (BUILT_IN_CLRSB):
7506 target = expand_builtin_unop (target_mode, exp, target,
7507 subtarget, clrsb_optab);
7508 if (target)
7509 return target;
7510 break;
7512 CASE_INT_FN (BUILT_IN_POPCOUNT):
7513 target = expand_builtin_unop (target_mode, exp, target,
7514 subtarget, popcount_optab);
7515 if (target)
7516 return target;
7517 break;
7519 CASE_INT_FN (BUILT_IN_PARITY):
7520 target = expand_builtin_unop (target_mode, exp, target,
7521 subtarget, parity_optab);
7522 if (target)
7523 return target;
7524 break;
7526 case BUILT_IN_STRLEN:
7527 target = expand_builtin_strlen (exp, target, target_mode);
7528 if (target)
7529 return target;
7530 break;
7532 case BUILT_IN_STRNLEN:
7533 target = expand_builtin_strnlen (exp, target, target_mode);
7534 if (target)
7535 return target;
7536 break;
7538 case BUILT_IN_STRCAT:
7539 target = expand_builtin_strcat (exp, target);
7540 if (target)
7541 return target;
7542 break;
7544 case BUILT_IN_STRCPY:
7545 target = expand_builtin_strcpy (exp, target);
7546 if (target)
7547 return target;
7548 break;
7550 case BUILT_IN_STRNCAT:
7551 target = expand_builtin_strncat (exp, target);
7552 if (target)
7553 return target;
7554 break;
7556 case BUILT_IN_STRNCPY:
7557 target = expand_builtin_strncpy (exp, target);
7558 if (target)
7559 return target;
7560 break;
7562 case BUILT_IN_STPCPY:
7563 target = expand_builtin_stpcpy (exp, target, mode);
7564 if (target)
7565 return target;
7566 break;
7568 case BUILT_IN_STPNCPY:
7569 target = expand_builtin_stpncpy (exp, target);
7570 if (target)
7571 return target;
7572 break;
7574 case BUILT_IN_MEMCHR:
7575 target = expand_builtin_memchr (exp, target);
7576 if (target)
7577 return target;
7578 break;
7580 case BUILT_IN_MEMCPY:
7581 target = expand_builtin_memcpy (exp, target);
7582 if (target)
7583 return target;
7584 break;
7586 case BUILT_IN_MEMMOVE:
7587 target = expand_builtin_memmove (exp, target);
7588 if (target)
7589 return target;
7590 break;
7592 case BUILT_IN_MEMPCPY:
7593 target = expand_builtin_mempcpy (exp, target);
7594 if (target)
7595 return target;
7596 break;
7598 case BUILT_IN_MEMSET:
7599 target = expand_builtin_memset (exp, target, mode);
7600 if (target)
7601 return target;
7602 break;
7604 case BUILT_IN_BZERO:
7605 target = expand_builtin_bzero (exp);
7606 if (target)
7607 return target;
7608 break;
7610 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7611 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7612 when changing it to a strcmp call. */
7613 case BUILT_IN_STRCMP_EQ:
7614 target = expand_builtin_memcmp (exp, target, true);
7615 if (target)
7616 return target;
7618 /* Change this call back to a BUILT_IN_STRCMP. */
7619 TREE_OPERAND (exp, 1)
7620 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7622 /* Delete the last parameter. */
7623 unsigned int i;
7624 vec<tree, va_gc> *arg_vec;
7625 vec_alloc (arg_vec, 2);
7626 for (i = 0; i < 2; i++)
7627 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7628 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7629 /* FALLTHROUGH */
7631 case BUILT_IN_STRCMP:
7632 target = expand_builtin_strcmp (exp, target);
7633 if (target)
7634 return target;
7635 break;
7637 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7638 back to a BUILT_IN_STRNCMP. */
7639 case BUILT_IN_STRNCMP_EQ:
7640 target = expand_builtin_memcmp (exp, target, true);
7641 if (target)
7642 return target;
7644 /* Change it back to a BUILT_IN_STRNCMP. */
7645 TREE_OPERAND (exp, 1)
7646 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7647 /* FALLTHROUGH */
7649 case BUILT_IN_STRNCMP:
7650 target = expand_builtin_strncmp (exp, target, mode);
7651 if (target)
7652 return target;
7653 break;
7655 case BUILT_IN_BCMP:
7656 case BUILT_IN_MEMCMP:
7657 case BUILT_IN_MEMCMP_EQ:
7658 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7659 if (target)
7660 return target;
7661 if (fcode == BUILT_IN_MEMCMP_EQ)
7663 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7664 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7666 break;
7668 case BUILT_IN_SETJMP:
7669 /* This should have been lowered to the builtins below. */
7670 gcc_unreachable ();
7672 case BUILT_IN_SETJMP_SETUP:
7673 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7674 and the receiver label. */
7675 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7677 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7678 VOIDmode, EXPAND_NORMAL);
7679 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7680 rtx_insn *label_r = label_rtx (label);
7682 /* This is copied from the handling of non-local gotos. */
7683 expand_builtin_setjmp_setup (buf_addr, label_r);
7684 nonlocal_goto_handler_labels
7685 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7686 nonlocal_goto_handler_labels);
7687 /* ??? Do not let expand_label treat us as such since we would
7688 not want to be both on the list of non-local labels and on
7689 the list of forced labels. */
7690 FORCED_LABEL (label) = 0;
7691 return const0_rtx;
7693 break;
7695 case BUILT_IN_SETJMP_RECEIVER:
7696 /* __builtin_setjmp_receiver is passed the receiver label. */
7697 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7699 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7700 rtx_insn *label_r = label_rtx (label);
7702 expand_builtin_setjmp_receiver (label_r);
7703 return const0_rtx;
7705 break;
7707 /* __builtin_longjmp is passed a pointer to an array of five words.
7708 It's similar to the C library longjmp function but works with
7709 __builtin_setjmp above. */
7710 case BUILT_IN_LONGJMP:
7711 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7713 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7714 VOIDmode, EXPAND_NORMAL);
7715 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7717 if (value != const1_rtx)
7719 error ("%<__builtin_longjmp%> second argument must be 1");
7720 return const0_rtx;
7723 expand_builtin_longjmp (buf_addr, value);
7724 return const0_rtx;
7726 break;
7728 case BUILT_IN_NONLOCAL_GOTO:
7729 target = expand_builtin_nonlocal_goto (exp);
7730 if (target)
7731 return target;
7732 break;
7734 /* This updates the setjmp buffer that is its argument with the value
7735 of the current stack pointer. */
7736 case BUILT_IN_UPDATE_SETJMP_BUF:
7737 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7739 rtx buf_addr
7740 = expand_normal (CALL_EXPR_ARG (exp, 0));
7742 expand_builtin_update_setjmp_buf (buf_addr);
7743 return const0_rtx;
7745 break;
7747 case BUILT_IN_TRAP:
7748 expand_builtin_trap ();
7749 return const0_rtx;
7751 case BUILT_IN_UNREACHABLE:
7752 expand_builtin_unreachable ();
7753 return const0_rtx;
7755 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7756 case BUILT_IN_SIGNBITD32:
7757 case BUILT_IN_SIGNBITD64:
7758 case BUILT_IN_SIGNBITD128:
7759 target = expand_builtin_signbit (exp, target);
7760 if (target)
7761 return target;
7762 break;
7764 /* Various hooks for the DWARF 2 __throw routine. */
7765 case BUILT_IN_UNWIND_INIT:
7766 expand_builtin_unwind_init ();
7767 return const0_rtx;
7768 case BUILT_IN_DWARF_CFA:
7769 return virtual_cfa_rtx;
7770 #ifdef DWARF2_UNWIND_INFO
7771 case BUILT_IN_DWARF_SP_COLUMN:
7772 return expand_builtin_dwarf_sp_column ();
7773 case BUILT_IN_INIT_DWARF_REG_SIZES:
7774 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7775 return const0_rtx;
7776 #endif
7777 case BUILT_IN_FROB_RETURN_ADDR:
7778 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7779 case BUILT_IN_EXTRACT_RETURN_ADDR:
7780 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7781 case BUILT_IN_EH_RETURN:
7782 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7783 CALL_EXPR_ARG (exp, 1));
7784 return const0_rtx;
7785 case BUILT_IN_EH_RETURN_DATA_REGNO:
7786 return expand_builtin_eh_return_data_regno (exp);
7787 case BUILT_IN_EXTEND_POINTER:
7788 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7789 case BUILT_IN_EH_POINTER:
7790 return expand_builtin_eh_pointer (exp);
7791 case BUILT_IN_EH_FILTER:
7792 return expand_builtin_eh_filter (exp);
7793 case BUILT_IN_EH_COPY_VALUES:
7794 return expand_builtin_eh_copy_values (exp);
7796 case BUILT_IN_VA_START:
7797 return expand_builtin_va_start (exp);
7798 case BUILT_IN_VA_END:
7799 return expand_builtin_va_end (exp);
7800 case BUILT_IN_VA_COPY:
7801 return expand_builtin_va_copy (exp);
7802 case BUILT_IN_EXPECT:
7803 return expand_builtin_expect (exp, target);
7804 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7805 return expand_builtin_expect_with_probability (exp, target);
7806 case BUILT_IN_ASSUME_ALIGNED:
7807 return expand_builtin_assume_aligned (exp, target);
7808 case BUILT_IN_PREFETCH:
7809 expand_builtin_prefetch (exp);
7810 return const0_rtx;
7812 case BUILT_IN_INIT_TRAMPOLINE:
7813 return expand_builtin_init_trampoline (exp, true);
7814 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7815 return expand_builtin_init_trampoline (exp, false);
7816 case BUILT_IN_ADJUST_TRAMPOLINE:
7817 return expand_builtin_adjust_trampoline (exp);
7819 case BUILT_IN_INIT_DESCRIPTOR:
7820 return expand_builtin_init_descriptor (exp);
7821 case BUILT_IN_ADJUST_DESCRIPTOR:
7822 return expand_builtin_adjust_descriptor (exp);
7824 case BUILT_IN_FORK:
7825 case BUILT_IN_EXECL:
7826 case BUILT_IN_EXECV:
7827 case BUILT_IN_EXECLP:
7828 case BUILT_IN_EXECLE:
7829 case BUILT_IN_EXECVP:
7830 case BUILT_IN_EXECVE:
7831 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7832 if (target)
7833 return target;
7834 break;
7836 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7837 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7838 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7839 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7840 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7841 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7842 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7843 if (target)
7844 return target;
7845 break;
7847 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7848 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7849 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7850 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7851 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7852 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7853 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7854 if (target)
7855 return target;
7856 break;
7858 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7859 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7860 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7861 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7862 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7863 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7864 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7865 if (target)
7866 return target;
7867 break;
7869 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7870 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7871 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7872 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7873 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7874 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7875 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7876 if (target)
7877 return target;
7878 break;
7880 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7881 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7882 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7883 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7884 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7885 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7886 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7887 if (target)
7888 return target;
7889 break;
7891 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7892 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7893 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7894 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7895 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7896 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7897 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7898 if (target)
7899 return target;
7900 break;
7902 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7903 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7904 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7905 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7906 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7907 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7908 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7909 if (target)
7910 return target;
7911 break;
7913 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7914 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7915 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7916 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7917 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7918 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7919 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7920 if (target)
7921 return target;
7922 break;
7924 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7925 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7926 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7927 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7928 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7929 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7930 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7931 if (target)
7932 return target;
7933 break;
7935 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7936 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7937 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7938 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7939 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7940 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7941 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7942 if (target)
7943 return target;
7944 break;
7946 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7947 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7948 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7949 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7950 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7951 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7952 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7953 if (target)
7954 return target;
7955 break;
7957 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7958 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7959 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7960 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7961 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7962 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7963 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7964 if (target)
7965 return target;
7966 break;
7968 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7969 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7970 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7971 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7972 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7973 if (mode == VOIDmode)
7974 mode = TYPE_MODE (boolean_type_node);
7975 if (!target || !register_operand (target, mode))
7976 target = gen_reg_rtx (mode);
7978 mode = get_builtin_sync_mode
7979 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7980 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7981 if (target)
7982 return target;
7983 break;
7985 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7986 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7987 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7988 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7989 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7990 mode = get_builtin_sync_mode
7991 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7992 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7993 if (target)
7994 return target;
7995 break;
7997 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7998 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7999 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8000 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8001 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8002 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8003 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8004 if (target)
8005 return target;
8006 break;
8008 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8009 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8010 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8011 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8012 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8013 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8014 expand_builtin_sync_lock_release (mode, exp);
8015 return const0_rtx;
8017 case BUILT_IN_SYNC_SYNCHRONIZE:
8018 expand_builtin_sync_synchronize ();
8019 return const0_rtx;
8021 case BUILT_IN_ATOMIC_EXCHANGE_1:
8022 case BUILT_IN_ATOMIC_EXCHANGE_2:
8023 case BUILT_IN_ATOMIC_EXCHANGE_4:
8024 case BUILT_IN_ATOMIC_EXCHANGE_8:
8025 case BUILT_IN_ATOMIC_EXCHANGE_16:
8026 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8027 target = expand_builtin_atomic_exchange (mode, exp, target);
8028 if (target)
8029 return target;
8030 break;
8032 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8033 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8034 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8035 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8036 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8038 unsigned int nargs, z;
8039 vec<tree, va_gc> *vec;
8041 mode =
8042 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8043 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8044 if (target)
8045 return target;
8047 /* If this is turned into an external library call, the weak parameter
8048 must be dropped to match the expected parameter list. */
8049 nargs = call_expr_nargs (exp);
8050 vec_alloc (vec, nargs - 1);
8051 for (z = 0; z < 3; z++)
8052 vec->quick_push (CALL_EXPR_ARG (exp, z));
8053 /* Skip the boolean weak parameter. */
8054 for (z = 4; z < 6; z++)
8055 vec->quick_push (CALL_EXPR_ARG (exp, z));
8056 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8057 break;
8060 case BUILT_IN_ATOMIC_LOAD_1:
8061 case BUILT_IN_ATOMIC_LOAD_2:
8062 case BUILT_IN_ATOMIC_LOAD_4:
8063 case BUILT_IN_ATOMIC_LOAD_8:
8064 case BUILT_IN_ATOMIC_LOAD_16:
8065 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8066 target = expand_builtin_atomic_load (mode, exp, target);
8067 if (target)
8068 return target;
8069 break;
8071 case BUILT_IN_ATOMIC_STORE_1:
8072 case BUILT_IN_ATOMIC_STORE_2:
8073 case BUILT_IN_ATOMIC_STORE_4:
8074 case BUILT_IN_ATOMIC_STORE_8:
8075 case BUILT_IN_ATOMIC_STORE_16:
8076 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8077 target = expand_builtin_atomic_store (mode, exp);
8078 if (target)
8079 return const0_rtx;
8080 break;
8082 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8083 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8084 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8085 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8086 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8088 enum built_in_function lib;
8089 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8090 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8091 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8092 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8093 ignore, lib);
8094 if (target)
8095 return target;
8096 break;
8098 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8099 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8100 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8101 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8102 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8104 enum built_in_function lib;
8105 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8106 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8107 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8108 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8109 ignore, lib);
8110 if (target)
8111 return target;
8112 break;
8114 case BUILT_IN_ATOMIC_AND_FETCH_1:
8115 case BUILT_IN_ATOMIC_AND_FETCH_2:
8116 case BUILT_IN_ATOMIC_AND_FETCH_4:
8117 case BUILT_IN_ATOMIC_AND_FETCH_8:
8118 case BUILT_IN_ATOMIC_AND_FETCH_16:
8120 enum built_in_function lib;
8121 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8122 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8123 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8124 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8125 ignore, lib);
8126 if (target)
8127 return target;
8128 break;
8130 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8131 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8132 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8133 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8134 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8136 enum built_in_function lib;
8137 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8138 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8139 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8140 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8141 ignore, lib);
8142 if (target)
8143 return target;
8144 break;
8146 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8147 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8148 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8149 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8150 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8152 enum built_in_function lib;
8153 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8154 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8155 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8156 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8157 ignore, lib);
8158 if (target)
8159 return target;
8160 break;
8162 case BUILT_IN_ATOMIC_OR_FETCH_1:
8163 case BUILT_IN_ATOMIC_OR_FETCH_2:
8164 case BUILT_IN_ATOMIC_OR_FETCH_4:
8165 case BUILT_IN_ATOMIC_OR_FETCH_8:
8166 case BUILT_IN_ATOMIC_OR_FETCH_16:
8168 enum built_in_function lib;
8169 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8170 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8171 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8172 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8173 ignore, lib);
8174 if (target)
8175 return target;
8176 break;
8178 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8179 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8180 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8181 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8182 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8183 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8184 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8185 ignore, BUILT_IN_NONE);
8186 if (target)
8187 return target;
8188 break;
8190 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8191 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8192 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8193 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8194 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8195 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8196 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8197 ignore, BUILT_IN_NONE);
8198 if (target)
8199 return target;
8200 break;
8202 case BUILT_IN_ATOMIC_FETCH_AND_1:
8203 case BUILT_IN_ATOMIC_FETCH_AND_2:
8204 case BUILT_IN_ATOMIC_FETCH_AND_4:
8205 case BUILT_IN_ATOMIC_FETCH_AND_8:
8206 case BUILT_IN_ATOMIC_FETCH_AND_16:
8207 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8208 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8209 ignore, BUILT_IN_NONE);
8210 if (target)
8211 return target;
8212 break;
8214 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8215 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8216 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8217 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8218 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8219 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8220 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8221 ignore, BUILT_IN_NONE);
8222 if (target)
8223 return target;
8224 break;
8226 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8227 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8228 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8229 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8230 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8231 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8232 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8233 ignore, BUILT_IN_NONE);
8234 if (target)
8235 return target;
8236 break;
8238 case BUILT_IN_ATOMIC_FETCH_OR_1:
8239 case BUILT_IN_ATOMIC_FETCH_OR_2:
8240 case BUILT_IN_ATOMIC_FETCH_OR_4:
8241 case BUILT_IN_ATOMIC_FETCH_OR_8:
8242 case BUILT_IN_ATOMIC_FETCH_OR_16:
8243 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8244 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8245 ignore, BUILT_IN_NONE);
8246 if (target)
8247 return target;
8248 break;
8250 case BUILT_IN_ATOMIC_TEST_AND_SET:
8251 return expand_builtin_atomic_test_and_set (exp, target);
8253 case BUILT_IN_ATOMIC_CLEAR:
8254 return expand_builtin_atomic_clear (exp);
8256 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8257 return expand_builtin_atomic_always_lock_free (exp);
8259 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8260 target = expand_builtin_atomic_is_lock_free (exp);
8261 if (target)
8262 return target;
8263 break;
8265 case BUILT_IN_ATOMIC_THREAD_FENCE:
8266 expand_builtin_atomic_thread_fence (exp);
8267 return const0_rtx;
8269 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8270 expand_builtin_atomic_signal_fence (exp);
8271 return const0_rtx;
8273 case BUILT_IN_OBJECT_SIZE:
8274 return expand_builtin_object_size (exp);
8276 case BUILT_IN_MEMCPY_CHK:
8277 case BUILT_IN_MEMPCPY_CHK:
8278 case BUILT_IN_MEMMOVE_CHK:
8279 case BUILT_IN_MEMSET_CHK:
8280 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8281 if (target)
8282 return target;
8283 break;
8285 case BUILT_IN_STRCPY_CHK:
8286 case BUILT_IN_STPCPY_CHK:
8287 case BUILT_IN_STRNCPY_CHK:
8288 case BUILT_IN_STPNCPY_CHK:
8289 case BUILT_IN_STRCAT_CHK:
8290 case BUILT_IN_STRNCAT_CHK:
8291 case BUILT_IN_SNPRINTF_CHK:
8292 case BUILT_IN_VSNPRINTF_CHK:
8293 maybe_emit_chk_warning (exp, fcode);
8294 break;
8296 case BUILT_IN_SPRINTF_CHK:
8297 case BUILT_IN_VSPRINTF_CHK:
8298 maybe_emit_sprintf_chk_warning (exp, fcode);
8299 break;
8301 case BUILT_IN_FREE:
8302 if (warn_free_nonheap_object)
8303 maybe_emit_free_warning (exp);
8304 break;
8306 case BUILT_IN_THREAD_POINTER:
8307 return expand_builtin_thread_pointer (exp, target);
8309 case BUILT_IN_SET_THREAD_POINTER:
8310 expand_builtin_set_thread_pointer (exp);
8311 return const0_rtx;
8313 case BUILT_IN_ACC_ON_DEVICE:
8314 /* Do the library call, if we failed to expand the builtin when
8315 folding. */
8316 break;
8318 case BUILT_IN_GOACC_PARLEVEL_ID:
8319 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8320 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8322 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8323 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8325 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8326 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8327 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8328 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8329 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8330 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8331 return expand_speculation_safe_value (mode, exp, target, ignore);
8333 default: /* Just do a library call, if unknown builtin. */
8334 break;
8337 /* The switch statement above can drop through to cause the function
8338 to be called normally. */
8339 return expand_call (exp, target, ignore);
8342 /* Determine whether a tree node represents a call to a built-in
8343 function. If the tree T is a call to a built-in function with
8344 the right number of arguments of the appropriate types, return
8345 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8346 Otherwise the return value is END_BUILTINS. */
8348 enum built_in_function
8349 builtin_mathfn_code (const_tree t)
8351 const_tree fndecl, arg, parmlist;
8352 const_tree argtype, parmtype;
8353 const_call_expr_arg_iterator iter;
8355 if (TREE_CODE (t) != CALL_EXPR)
8356 return END_BUILTINS;
8358 fndecl = get_callee_fndecl (t);
8359 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8360 return END_BUILTINS;
8362 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8363 init_const_call_expr_arg_iterator (t, &iter);
8364 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8366 /* If a function doesn't take a variable number of arguments,
8367 the last element in the list will have type `void'. */
8368 parmtype = TREE_VALUE (parmlist);
8369 if (VOID_TYPE_P (parmtype))
8371 if (more_const_call_expr_args_p (&iter))
8372 return END_BUILTINS;
8373 return DECL_FUNCTION_CODE (fndecl);
8376 if (! more_const_call_expr_args_p (&iter))
8377 return END_BUILTINS;
8379 arg = next_const_call_expr_arg (&iter);
8380 argtype = TREE_TYPE (arg);
8382 if (SCALAR_FLOAT_TYPE_P (parmtype))
8384 if (! SCALAR_FLOAT_TYPE_P (argtype))
8385 return END_BUILTINS;
8387 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8389 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8390 return END_BUILTINS;
8392 else if (POINTER_TYPE_P (parmtype))
8394 if (! POINTER_TYPE_P (argtype))
8395 return END_BUILTINS;
8397 else if (INTEGRAL_TYPE_P (parmtype))
8399 if (! INTEGRAL_TYPE_P (argtype))
8400 return END_BUILTINS;
8402 else
8403 return END_BUILTINS;
8406 /* Variable-length argument list. */
8407 return DECL_FUNCTION_CODE (fndecl);
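/* For illustration (not part of GCC): given a CALL_EXPR for sqrt (x)
   with X of type double, builtin_mathfn_code returns BUILT_IN_SQRT;
   if X were a pointer instead, the SCALAR_FLOAT_TYPE_P check against
   the parameter list above would fail and END_BUILTINS would be
   returned.  */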
8410 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8411 evaluate to a constant. */
8413 static tree
8414 fold_builtin_constant_p (tree arg)
8416 /* We return 1 for a numeric type that's known to be a constant
8417 value at compile-time or for an aggregate type that's a
8418 literal constant. */
8419 STRIP_NOPS (arg);
8421 /* If we know this is a constant, return the constant one. */
8422 if (CONSTANT_CLASS_P (arg)
8423 || (TREE_CODE (arg) == CONSTRUCTOR
8424 && TREE_CONSTANT (arg)))
8425 return integer_one_node;
8426 if (TREE_CODE (arg) == ADDR_EXPR)
8428 tree op = TREE_OPERAND (arg, 0);
8429 if (TREE_CODE (op) == STRING_CST
8430 || (TREE_CODE (op) == ARRAY_REF
8431 && integer_zerop (TREE_OPERAND (op, 1))
8432 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8433 return integer_one_node;
8436 /* If this expression has side effects, show we don't know it to be a
8437 constant. Likewise if it's a pointer or aggregate type since in
8438 those cases we only want literals, since those are only optimized
8439 when generating RTL, not later.
8440 And finally, if we are compiling an initializer, not code, we
8441 need to return a definite result now; there's not going to be any
8442 more optimization done. */
8443 if (TREE_SIDE_EFFECTS (arg)
8444 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8445 || POINTER_TYPE_P (TREE_TYPE (arg))
8446 || cfun == 0
8447 || folding_initializer
8448 || force_folding_builtin_constant_p)
8449 return integer_zero_node;
8451 return NULL_TREE;
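/* For illustration (not part of GCC), typical outcomes of the folder
   above:

     __builtin_constant_p (3 + 4)    -> integer_one_node
     __builtin_constant_p (&buf[1])  -> integer_zero_node (pointer type;
                                        only literals are accepted)
     __builtin_constant_p (n)        -> NULL_TREE for a plain int N inside
                                        a function body, deferring the
                                        answer to later optimizations.  */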
8454 /* Create a call to builtin_expect or builtin_expect_with_probability
8455 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8456 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8457 argument; builtin_expect_with_probability instead uses the third argument
8458 as the PROBABILITY value. */
8460 static tree
8461 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8462 tree predictor, tree probability)
8464 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8466 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8467 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8468 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8469 ret_type = TREE_TYPE (TREE_TYPE (fn));
8470 pred_type = TREE_VALUE (arg_types);
8471 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8473 pred = fold_convert_loc (loc, pred_type, pred);
8474 expected = fold_convert_loc (loc, expected_type, expected);
8476 if (probability)
8477 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8478 else
8479 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8480 predictor);
8482 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8483 build_int_cst (ret_type, 0));
8486 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8487 NULL_TREE if no simplification is possible. */
8489 tree
8490 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8491 tree arg3)
8493 tree inner, fndecl, inner_arg0;
8494 enum tree_code code;
8496 /* Distribute the expected value over short-circuiting operators.
8497 See through the cast from truthvalue_type_node to long. */
8498 inner_arg0 = arg0;
8499 while (CONVERT_EXPR_P (inner_arg0)
8500 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8501 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8502 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8504 /* If this is a builtin_expect within a builtin_expect keep the
8505 inner one. See through a comparison against a constant. It
8506 might have been added to create a truthvalue. */
8507 inner = inner_arg0;
8509 if (COMPARISON_CLASS_P (inner)
8510 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8511 inner = TREE_OPERAND (inner, 0);
8513 if (TREE_CODE (inner) == CALL_EXPR
8514 && (fndecl = get_callee_fndecl (inner))
8515 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8516 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8517 return arg0;
8519 inner = inner_arg0;
8520 code = TREE_CODE (inner);
8521 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8523 tree op0 = TREE_OPERAND (inner, 0);
8524 tree op1 = TREE_OPERAND (inner, 1);
8525 arg1 = save_expr (arg1);
8527 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8528 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8529 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8531 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8534 /* If the argument isn't invariant then there's nothing else we can do. */
8535 if (!TREE_CONSTANT (inner_arg0))
8536 return NULL_TREE;
8538 /* If we expect that a comparison against the argument will fold to
8539 a constant return the constant. In practice, this means a true
8540 constant or the address of a non-weak symbol. */
8541 inner = inner_arg0;
8542 STRIP_NOPS (inner);
8543 if (TREE_CODE (inner) == ADDR_EXPR)
8545 do
8547 inner = TREE_OPERAND (inner, 0);
8549 while (TREE_CODE (inner) == COMPONENT_REF
8550 || TREE_CODE (inner) == ARRAY_REF);
8551 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8552 return NULL_TREE;
8555 /* Otherwise, ARG0 already has the proper type for the return value. */
8556 return arg0;
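/* For illustration (not part of GCC): the distribution step above
   rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit carries its own prediction, while a
   nested __builtin_expect (possibly hidden behind a comparison against
   a constant) is simply looked through.  */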
8559 /* Fold a call to __builtin_classify_type with argument ARG. */
8561 static tree
8562 fold_builtin_classify_type (tree arg)
8564 if (arg == 0)
8565 return build_int_cst (integer_type_node, no_type_class);
8567 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8570 /* Fold a call to __builtin_strlen with argument ARG. */
8572 static tree
8573 fold_builtin_strlen (location_t loc, tree type, tree arg)
8575 if (!validate_arg (arg, POINTER_TYPE))
8576 return NULL_TREE;
8577 else
8579 c_strlen_data lendata = { };
8580 tree len = c_strlen (arg, 0, &lendata);
8582 if (len)
8583 return fold_convert_loc (loc, type, len);
8585 if (!lendata.decl)
8586 c_strlen (arg, 1, &lendata);
8588 if (lendata.decl)
8590 if (EXPR_HAS_LOCATION (arg))
8591 loc = EXPR_LOCATION (arg);
8592 else if (loc == UNKNOWN_LOCATION)
8593 loc = input_location;
8594 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8597 return NULL_TREE;
8601 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8603 static tree
8604 fold_builtin_inf (location_t loc, tree type, int warn)
8606 REAL_VALUE_TYPE real;
8608 /* __builtin_inff is intended to be usable to define INFINITY on all
8609 targets. If an infinity is not available, INFINITY expands "to a
8610 positive constant of type float that overflows at translation
8611 time", footnote "In this case, using INFINITY will violate the
8612 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8613 Thus we pedwarn to ensure this constraint violation is
8614 diagnosed. */
8615 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8616 pedwarn (loc, 0, "target format does not support infinity");
8618 real_inf (&real);
8619 return build_real (type, real);
8622 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8623 NULL_TREE if no simplification can be made. */
8625 static tree
8626 fold_builtin_sincos (location_t loc,
8627 tree arg0, tree arg1, tree arg2)
8629 tree type;
8630 tree fndecl, call = NULL_TREE;
8632 if (!validate_arg (arg0, REAL_TYPE)
8633 || !validate_arg (arg1, POINTER_TYPE)
8634 || !validate_arg (arg2, POINTER_TYPE))
8635 return NULL_TREE;
8637 type = TREE_TYPE (arg0);
8639 /* Canonicalize sincos to cexpi. */
8640 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8641 if (fn == END_BUILTINS)
8642 return NULL_TREE;
8644 /* Calculate the result when the argument is a constant. */
8645 if (TREE_CODE (arg0) == REAL_CST)
8647 tree complex_type = build_complex_type (type);
8648 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8650 if (!call)
8652 if (!targetm.libc_has_function (function_c99_math_complex)
8653 || !builtin_decl_implicit_p (fn))
8654 return NULL_TREE;
8655 fndecl = builtin_decl_explicit (fn);
8656 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8657 call = builtin_save_expr (call);
8660 tree ptype = build_pointer_type (type);
8661 arg1 = fold_convert (ptype, arg1);
8662 arg2 = fold_convert (ptype, arg2);
8663 return build2 (COMPOUND_EXPR, void_type_node,
8664 build2 (MODIFY_EXPR, void_type_node,
8665 build_fold_indirect_ref_loc (loc, arg1),
8666 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8667 build2 (MODIFY_EXPR, void_type_node,
8668 build_fold_indirect_ref_loc (loc, arg2),
8669 fold_build1_loc (loc, REALPART_EXPR, type, call)));
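/* For illustration (not part of GCC): when the target libc provides the
   C99 complex functions, sincos (x, &s, &c) is canonicalized to roughly

     tmp = cexpi (x);
     s = IMAGPART_EXPR <tmp>;
     c = REALPART_EXPR <tmp>;

   using the identity cexpi (x) == cos (x) + i * sin (x).  */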
8672 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8673 Return NULL_TREE if no simplification can be made. */
8675 static tree
8676 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8678 if (!validate_arg (arg1, POINTER_TYPE)
8679 || !validate_arg (arg2, POINTER_TYPE)
8680 || !validate_arg (len, INTEGER_TYPE))
8681 return NULL_TREE;
8683 /* If the LEN parameter is zero, return zero. */
8684 if (integer_zerop (len))
8685 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8686 arg1, arg2);
8688 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8689 if (operand_equal_p (arg1, arg2, 0))
8690 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8692 /* If the LEN parameter is one, return an expression corresponding to
8693 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8694 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8696 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8697 tree cst_uchar_ptr_node
8698 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8700 tree ind1
8701 = fold_convert_loc (loc, integer_type_node,
8702 build1 (INDIRECT_REF, cst_uchar_node,
8703 fold_convert_loc (loc,
8704 cst_uchar_ptr_node,
8705 arg1)));
8706 tree ind2
8707 = fold_convert_loc (loc, integer_type_node,
8708 build1 (INDIRECT_REF, cst_uchar_node,
8709 fold_convert_loc (loc,
8710 cst_uchar_ptr_node,
8711 arg2)));
8712 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8715 return NULL_TREE;
8718 /* Fold a call to builtin isascii with argument ARG. */
8720 static tree
8721 fold_builtin_isascii (location_t loc, tree arg)
8723 if (!validate_arg (arg, INTEGER_TYPE))
8724 return NULL_TREE;
8725 else
8727 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8728 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8729 build_int_cst (integer_type_node,
8730 ~ (unsigned HOST_WIDE_INT) 0x7f));
8731 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8732 arg, integer_zero_node);
8736 /* Fold a call to builtin toascii with argument ARG. */
8738 static tree
8739 fold_builtin_toascii (location_t loc, tree arg)
8741 if (!validate_arg (arg, INTEGER_TYPE))
8742 return NULL_TREE;
8744 /* Transform toascii(c) -> (c & 0x7f). */
8745 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8746 build_int_cst (integer_type_node, 0x7f));
8749 /* Fold a call to builtin isdigit with argument ARG. */
8751 static tree
8752 fold_builtin_isdigit (location_t loc, tree arg)
8754 if (!validate_arg (arg, INTEGER_TYPE))
8755 return NULL_TREE;
8756 else
8758 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8759 /* According to the C standard, isdigit is unaffected by locale.
8760 However, it definitely is affected by the target character set. */
8761 unsigned HOST_WIDE_INT target_digit0
8762 = lang_hooks.to_target_charset ('0');
8764 if (target_digit0 == 0)
8765 return NULL_TREE;
8767 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8768 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8769 build_int_cst (unsigned_type_node, target_digit0));
8770 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8771 build_int_cst (unsigned_type_node, 9));
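/* For illustration (not part of GCC): on an ASCII target, '0' maps to
   48, so the transform above turns isdigit (c) into

     (unsigned) c - 48 <= 9

   one unsigned comparison covering exactly the character codes
   48..57.  */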
8775 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8777 static tree
8778 fold_builtin_fabs (location_t loc, tree arg, tree type)
8780 if (!validate_arg (arg, REAL_TYPE))
8781 return NULL_TREE;
8783 arg = fold_convert_loc (loc, type, arg);
8784 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8787 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8789 static tree
8790 fold_builtin_abs (location_t loc, tree arg, tree type)
8792 if (!validate_arg (arg, INTEGER_TYPE))
8793 return NULL_TREE;
8795 arg = fold_convert_loc (loc, type, arg);
8796 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8799 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8801 static tree
8802 fold_builtin_carg (location_t loc, tree arg, tree type)
8804 if (validate_arg (arg, COMPLEX_TYPE)
8805 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8807 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8809 if (atan2_fn)
8811 tree new_arg = builtin_save_expr (arg);
8812 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8813 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8814 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8818 return NULL_TREE;
8821 /* Fold a call to builtin frexp; we can assume the base is 2. */
8823 static tree
8824 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8826 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8827 return NULL_TREE;
8829 STRIP_NOPS (arg0);
8831 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8832 return NULL_TREE;
8834 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8836 /* Proceed if a valid pointer type was passed in. */
8837 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8839 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8840 tree frac, exp;
8842 switch (value->cl)
8844 case rvc_zero:
8845 /* For +-0, return (*exp = 0, +-0). */
8846 exp = integer_zero_node;
8847 frac = arg0;
8848 break;
8849 case rvc_nan:
8850 case rvc_inf:
8851 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8852 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8853 case rvc_normal:
8855 /* Since the frexp function always expects base 2, and in
8856 GCC normalized significands are already in the range
8857 [0.5, 1.0), we have exactly what frexp wants. */
8858 REAL_VALUE_TYPE frac_rvt = *value;
8859 SET_REAL_EXP (&frac_rvt, 0);
8860 frac = build_real (rettype, frac_rvt);
8861 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8863 break;
8864 default:
8865 gcc_unreachable ();
8868 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8869 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8870 TREE_SIDE_EFFECTS (arg1) = 1;
8871 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8874 return NULL_TREE;
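/* Editorial sketch: what the constant fold above computes.  Since GCC
   keeps normalized significands in [0.5, 1.0), frexp (8.0, &e) yields
   0.5 with e == 4, as 8.0 == 0.5 * 2^4.  */
#include <math.h>
#include <assert.h>

static void
check_frexp_fold (void)
{
  int e;
  double frac = frexp (8.0, &e);
  assert (frac == 0.5 && e == 4);
}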
8877 /* Fold a call to builtin modf. */
8879 static tree
8880 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8882 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8883 return NULL_TREE;
8885 STRIP_NOPS (arg0);
8887 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8888 return NULL_TREE;
8890 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8892 /* Proceed if a valid pointer type was passed in. */
8893 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8895 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8896 REAL_VALUE_TYPE trunc, frac;
8898 switch (value->cl)
8900 case rvc_nan:
8901 case rvc_zero:
8902 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8903 trunc = frac = *value;
8904 break;
8905 case rvc_inf:
8906 /* For +-Inf, return (*arg1 = arg0, +-0). */
8907 frac = dconst0;
8908 frac.sign = value->sign;
8909 trunc = *value;
8910 break;
8911 case rvc_normal:
8912 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8913 real_trunc (&trunc, VOIDmode, value);
8914 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8915 /* If the original number was negative and already
8916 integral, then the fractional part is -0.0. */
8917 if (value->sign && frac.cl == rvc_zero)
8918 frac.sign = value->sign;
8919 break;
8922 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8923 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8924 build_real (rettype, trunc));
8925 TREE_SIDE_EFFECTS (arg1) = 1;
8926 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8927 build_real (rettype, frac));
8930 return NULL_TREE;
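/* Editorial sketch of the constant cases folded above, checked against
   the library semantics; note the -0.0 fraction for a negative value
   that is already integral, matching the rvc_normal branch.  */
#include <math.h>
#include <assert.h>

static void
check_modf_fold (void)
{
  double t, frac;

  frac = modf (-2.5, &t);
  assert (t == -2.0 && frac == -0.5);

  frac = modf (-2.0, &t);
  assert (t == -2.0 && frac == 0.0 && signbit (frac));
}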
8933 /* Given a location LOC, an interclass builtin function decl FNDECL
8934 and its single argument ARG, return a folded expression computing
8935 the same, or NULL_TREE if we either couldn't or didn't want to fold
8936 (the latter happens if there's an RTL instruction available). */
8938 static tree
8939 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8941 machine_mode mode;
8943 if (!validate_arg (arg, REAL_TYPE))
8944 return NULL_TREE;
8946 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8947 return NULL_TREE;
8949 mode = TYPE_MODE (TREE_TYPE (arg));
8951 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8953 /* If there is no optab, try generic code. */
8954 switch (DECL_FUNCTION_CODE (fndecl))
8956 tree result;
8958 CASE_FLT_FN (BUILT_IN_ISINF):
8960 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8961 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8962 tree type = TREE_TYPE (arg);
8963 REAL_VALUE_TYPE r;
8964 char buf[128];
8966 if (is_ibm_extended)
8968 /* NaN and Inf are encoded in the high-order double value
8969 only. The low-order value is not significant. */
8970 type = double_type_node;
8971 mode = DFmode;
8972 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8974 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8975 real_from_string (&r, buf);
8976 result = build_call_expr (isgr_fn, 2,
8977 fold_build1_loc (loc, ABS_EXPR, type, arg),
8978 build_real (type, r));
8979 return result;
8981 CASE_FLT_FN (BUILT_IN_FINITE):
8982 case BUILT_IN_ISFINITE:
8984 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8985 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8986 tree type = TREE_TYPE (arg);
8987 REAL_VALUE_TYPE r;
8988 char buf[128];
8990 if (is_ibm_extended)
8992 /* NaN and Inf are encoded in the high-order double value
8993 only. The low-order value is not significant. */
8994 type = double_type_node;
8995 mode = DFmode;
8996 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8998 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8999 real_from_string (&r, buf);
9000 result = build_call_expr (isle_fn, 2,
9001 fold_build1_loc (loc, ABS_EXPR, type, arg),
9002 build_real (type, r));
9003 /*result = fold_build2_loc (loc, UNGT_EXPR,
9004 TREE_TYPE (TREE_TYPE (fndecl)),
9005 fold_build1_loc (loc, ABS_EXPR, type, arg),
9006 build_real (type, r));
9007 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9008 TREE_TYPE (TREE_TYPE (fndecl)),
9009 result);*/
9010 return result;
9012 case BUILT_IN_ISNORMAL:
9014 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9015 islessequal(fabs(x),DBL_MAX). */
9016 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9017 tree type = TREE_TYPE (arg);
9018 tree orig_arg, max_exp, min_exp;
9019 machine_mode orig_mode = mode;
9020 REAL_VALUE_TYPE rmax, rmin;
9021 char buf[128];
9023 orig_arg = arg = builtin_save_expr (arg);
9024 if (is_ibm_extended)
9026 /* Use double to test the normal range of IBM extended
9027 precision. Emin for IBM extended precision is
9028 different from emin for IEEE double, being 53 higher
9029 since the low double exponent is at least 53 lower
9030 than the high double exponent. */
9031 type = double_type_node;
9032 mode = DFmode;
9033 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9035 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9037 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9038 real_from_string (&rmax, buf);
9039 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9040 real_from_string (&rmin, buf);
9041 max_exp = build_real (type, rmax);
9042 min_exp = build_real (type, rmin);
9044 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9045 if (is_ibm_extended)
9047 /* Testing the high end of the range is done just using
9048 the high double, using the same test as isfinite().
9049 For the subnormal end of the range we first test the
9050 high double, then if its magnitude is equal to the
9051 limit of 0x1p-969, we test whether the low double is
9052 non-zero and opposite sign to the high double. */
9053 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9054 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9055 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9056 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9057 arg, min_exp);
9058 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9059 complex_double_type_node, orig_arg);
9060 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9061 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9062 tree zero = build_real (type, dconst0);
9063 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9064 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9065 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9066 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9067 fold_build3 (COND_EXPR,
9068 integer_type_node,
9069 hilt, logt, lolt));
9070 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9071 eq_min, ok_lo);
9072 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9073 gt_min, eq_min);
9075 else
9077 tree const isge_fn
9078 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9079 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9081 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9082 max_exp, min_exp);
9083 return result;
9085 default:
9086 break;
9089 return NULL_TREE;
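/* Editorial sketch of the generic isinf rewrite above: isgreater
   (fabs (x), DBL_MAX) is true only for +/-Inf, since no finite double
   exceeds DBL_MAX and isgreater is false on unordered (NaN) operands.  */
#include <math.h>
#include <float.h>
#include <assert.h>

static void
check_isinf_rewrite (double x)
{
  assert (!!isinf (x) == isgreater (fabs (x), DBL_MAX));
}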
9092 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9093 ARG is the argument for the call. */
9095 static tree
9096 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9098 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9100 if (!validate_arg (arg, REAL_TYPE))
9101 return NULL_TREE;
9103 switch (builtin_index)
9105 case BUILT_IN_ISINF:
9106 if (!HONOR_INFINITIES (arg))
9107 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9109 return NULL_TREE;
9111 case BUILT_IN_ISINF_SIGN:
9113 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9114 /* In a boolean context, GCC will fold the inner COND_EXPR to
9115 1. So e.g. "if (isinf_sign(x))" would be folded to just
9116 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9117 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9118 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9119 tree tmp = NULL_TREE;
9121 arg = builtin_save_expr (arg);
9123 if (signbit_fn && isinf_fn)
9125 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9126 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9128 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9129 signbit_call, integer_zero_node);
9130 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9131 isinf_call, integer_zero_node);
9133 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9134 integer_minus_one_node, integer_one_node);
9135 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9136 isinf_call, tmp,
9137 integer_zero_node);
9140 return tmp;
9143 case BUILT_IN_ISFINITE:
9144 if (!HONOR_NANS (arg)
9145 && !HONOR_INFINITIES (arg))
9146 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9148 return NULL_TREE;
9150 case BUILT_IN_ISNAN:
9151 if (!HONOR_NANS (arg))
9152 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9155 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9156 if (is_ibm_extended)
9158 /* NaN and Inf are encoded in the high-order double value
9159 only. The low-order value is not significant. */
9160 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9163 arg = builtin_save_expr (arg);
9164 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9166 default:
9167 gcc_unreachable ();
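/* Editorial sketch: the isinf_sign expansion built above, written as
   plain C.  */
#include <math.h>

static int
folded_isinf_sign (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}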
9171 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9172 This builtin will generate code to return the appropriate floating
9173 point classification depending on the value of the floating point
9174 number passed in. The possible return values must be supplied as
9175 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9176 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9177 one floating-point argument, which is "type generic". */
9179 static tree
9180 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9182 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9183 arg, type, res, tmp;
9184 machine_mode mode;
9185 REAL_VALUE_TYPE r;
9186 char buf[128];
9188 /* Verify the required arguments in the original call. */
9189 if (nargs != 6
9190 || !validate_arg (args[0], INTEGER_TYPE)
9191 || !validate_arg (args[1], INTEGER_TYPE)
9192 || !validate_arg (args[2], INTEGER_TYPE)
9193 || !validate_arg (args[3], INTEGER_TYPE)
9194 || !validate_arg (args[4], INTEGER_TYPE)
9195 || !validate_arg (args[5], REAL_TYPE))
9196 return NULL_TREE;
9198 fp_nan = args[0];
9199 fp_infinite = args[1];
9200 fp_normal = args[2];
9201 fp_subnormal = args[3];
9202 fp_zero = args[4];
9203 arg = args[5];
9204 type = TREE_TYPE (arg);
9205 mode = TYPE_MODE (type);
9206 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9208 /* fpclassify(x) ->
9209 isnan(x) ? FP_NAN :
9210 (fabs(x) == Inf ? FP_INFINITE :
9211 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9212 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9214 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9215 build_real (type, dconst0));
9216 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9217 tmp, fp_zero, fp_subnormal);
9219 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9220 real_from_string (&r, buf);
9221 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9222 arg, build_real (type, r));
9223 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9225 if (HONOR_INFINITIES (mode))
9227 real_inf (&r);
9228 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9229 build_real (type, r));
9230 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9231 fp_infinite, res);
9234 if (HONOR_NANS (mode))
9236 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9237 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9240 return res;
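/* Editorial sketch of the COND_EXPR chain assembled above, as C.  The
   five FP_* values arrive as arguments, exactly like the builtin's
   leading parameters; DBL_MIN stands in for the 0x1p<emin-1> constant
   built from the mode.  */
#include <math.h>
#include <float.h>

static int
folded_fpclassify (int fp_nan, int fp_infinite, int fp_normal,
		   int fp_subnormal, int fp_zero, double x)
{
  double ax = fabs (x);
  return isnan (x) ? fp_nan
	 : ax == INFINITY ? fp_infinite
	 : ax >= DBL_MIN ? fp_normal
	 : ax == 0 ? fp_zero
	 : fp_subnormal;
}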
9243 /* Fold a call to an unordered comparison function such as
9244 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9245 being called and ARG0 and ARG1 are the arguments for the call.
9246 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9247 the opposite of the desired result. UNORDERED_CODE is used
9248 for modes that can hold NaNs and ORDERED_CODE is used for
9249 the rest. */
9251 static tree
9252 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9253 enum tree_code unordered_code,
9254 enum tree_code ordered_code)
9256 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9257 enum tree_code code;
9258 tree type0, type1;
9259 enum tree_code code0, code1;
9260 tree cmp_type = NULL_TREE;
9262 type0 = TREE_TYPE (arg0);
9263 type1 = TREE_TYPE (arg1);
9265 code0 = TREE_CODE (type0);
9266 code1 = TREE_CODE (type1);
9268 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9269 /* Choose the wider of two real types. */
9270 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9271 ? type0 : type1;
9272 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9273 cmp_type = type0;
9274 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9275 cmp_type = type1;
9277 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9278 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9280 if (unordered_code == UNORDERED_EXPR)
9282 if (!HONOR_NANS (arg0))
9283 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9284 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9287 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9288 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9289 fold_build2_loc (loc, code, type, arg0, arg1));
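/* Editorial sketch: when NaNs must be honored, isgreater (x, y) is
   folded to the negation of UNLE, i.e. the form below; unlike a raw
   x > y it raises no spurious invalid-operation exception on NaN.  */
#include <math.h>

static int
folded_isgreater (double x, double y)
{
  return !(isunordered (x, y) || islessequal (x, y));
}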
9292 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9293 arithmetic if it can never overflow, or into internal functions that
9294 return both the result of the arithmetic and an overflowed boolean flag in
9295 a complex integer result, or some other check for overflow.
9296 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9297 checking part of that. */
9299 static tree
9300 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9301 tree arg0, tree arg1, tree arg2)
9303 enum internal_fn ifn = IFN_LAST;
9304 /* The code of the expression corresponding to the built-in. */
9305 enum tree_code opcode = ERROR_MARK;
9306 bool ovf_only = false;
9308 switch (fcode)
9310 case BUILT_IN_ADD_OVERFLOW_P:
9311 ovf_only = true;
9312 /* FALLTHRU */
9313 case BUILT_IN_ADD_OVERFLOW:
9314 case BUILT_IN_SADD_OVERFLOW:
9315 case BUILT_IN_SADDL_OVERFLOW:
9316 case BUILT_IN_SADDLL_OVERFLOW:
9317 case BUILT_IN_UADD_OVERFLOW:
9318 case BUILT_IN_UADDL_OVERFLOW:
9319 case BUILT_IN_UADDLL_OVERFLOW:
9320 opcode = PLUS_EXPR;
9321 ifn = IFN_ADD_OVERFLOW;
9322 break;
9323 case BUILT_IN_SUB_OVERFLOW_P:
9324 ovf_only = true;
9325 /* FALLTHRU */
9326 case BUILT_IN_SUB_OVERFLOW:
9327 case BUILT_IN_SSUB_OVERFLOW:
9328 case BUILT_IN_SSUBL_OVERFLOW:
9329 case BUILT_IN_SSUBLL_OVERFLOW:
9330 case BUILT_IN_USUB_OVERFLOW:
9331 case BUILT_IN_USUBL_OVERFLOW:
9332 case BUILT_IN_USUBLL_OVERFLOW:
9333 opcode = MINUS_EXPR;
9334 ifn = IFN_SUB_OVERFLOW;
9335 break;
9336 case BUILT_IN_MUL_OVERFLOW_P:
9337 ovf_only = true;
9338 /* FALLTHRU */
9339 case BUILT_IN_MUL_OVERFLOW:
9340 case BUILT_IN_SMUL_OVERFLOW:
9341 case BUILT_IN_SMULL_OVERFLOW:
9342 case BUILT_IN_SMULLL_OVERFLOW:
9343 case BUILT_IN_UMUL_OVERFLOW:
9344 case BUILT_IN_UMULL_OVERFLOW:
9345 case BUILT_IN_UMULLL_OVERFLOW:
9346 opcode = MULT_EXPR;
9347 ifn = IFN_MUL_OVERFLOW;
9348 break;
9349 default:
9350 gcc_unreachable ();
9353 /* For the "generic" overloads, the first two arguments can have different
9354 types and the last argument determines the target type to use to check
9355 for overflow. The arguments of the other overloads all have the same
9356 type. */
9357 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9359 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9360 arguments are constant, attempt to fold the built-in call into a constant
9361 expression indicating whether or not it detected an overflow. */
9362 if (ovf_only
9363 && TREE_CODE (arg0) == INTEGER_CST
9364 && TREE_CODE (arg1) == INTEGER_CST)
9365 /* Perform the computation in the target type and check for overflow. */
9366 return omit_one_operand_loc (loc, boolean_type_node,
9367 arith_overflowed_p (opcode, type, arg0, arg1)
9368 ? boolean_true_node : boolean_false_node,
9369 arg2);
9371 tree intres, ovfres;
9372 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9374 intres = fold_binary_loc (loc, opcode, type,
9375 fold_convert_loc (loc, type, arg0),
9376 fold_convert_loc (loc, type, arg1));
9377 if (TREE_OVERFLOW (intres))
9378 intres = drop_tree_overflow (intres);
9379 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9380 ? boolean_true_node : boolean_false_node);
9382 else
9384 tree ctype = build_complex_type (type);
9385 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9386 arg0, arg1);
9387 tree tgt = save_expr (call);
9388 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9389 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9390 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9393 if (ovf_only)
9394 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9396 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9397 tree store
9398 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9399 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
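/* Editorial sketch: the behavior the folds above implement, using the
   generic overloads.  The third argument of the _p variant only
   supplies the type to check against; for constant operands the whole
   call collapses to a constant.  */
#include <limits.h>
#include <assert.h>

static void
check_overflow_folds (void)
{
  int res;
  assert (__builtin_add_overflow_p (INT_MAX, 1, (int) 0));
  assert (!__builtin_add_overflow (1, 2, &res) && res == 3);
}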
9402 /* Fold a call to __builtin_FILE to a constant string. */
9404 static inline tree
9405 fold_builtin_FILE (location_t loc)
9407 if (const char *fname = LOCATION_FILE (loc))
9409 /* The documentation says this builtin is equivalent to the preprocessor
9410 __FILE__ macro so it appears appropriate to use the same file prefix
9411 mappings. */
9412 fname = remap_macro_filename (fname);
9413 return build_string_literal (strlen (fname) + 1, fname);
9416 return build_string_literal (1, "");
9419 /* Fold a call to __builtin_FUNCTION to a constant string. */
9421 static inline tree
9422 fold_builtin_FUNCTION ()
9424 const char *name = "";
9426 if (current_function_decl)
9427 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9429 return build_string_literal (strlen (name) + 1, name);
9432 /* Fold a call to __builtin_LINE to an integer constant. */
9434 static inline tree
9435 fold_builtin_LINE (location_t loc, tree type)
9437 return build_int_cst (type, LOCATION_LINE (loc));
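/* Editorial sketch: the three folds above collapse these builtins to
   constants at the caller's location, which makes them usable in plain
   expressions (or C++ default arguments).  */
#include <stdio.h>

static void
log_here (void)
{
  printf ("%s:%d in %s\n", __builtin_FILE (), __builtin_LINE (),
	  __builtin_FUNCTION ());
}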
9440 /* Fold a call to built-in function FNDECL with 0 arguments.
9441 This function returns NULL_TREE if no simplification was possible. */
9443 static tree
9444 fold_builtin_0 (location_t loc, tree fndecl)
9446 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9447 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9448 switch (fcode)
9450 case BUILT_IN_FILE:
9451 return fold_builtin_FILE (loc);
9453 case BUILT_IN_FUNCTION:
9454 return fold_builtin_FUNCTION ();
9456 case BUILT_IN_LINE:
9457 return fold_builtin_LINE (loc, type);
9459 CASE_FLT_FN (BUILT_IN_INF):
9460 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9461 case BUILT_IN_INFD32:
9462 case BUILT_IN_INFD64:
9463 case BUILT_IN_INFD128:
9464 return fold_builtin_inf (loc, type, true);
9466 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9467 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9468 return fold_builtin_inf (loc, type, false);
9470 case BUILT_IN_CLASSIFY_TYPE:
9471 return fold_builtin_classify_type (NULL_TREE);
9473 default:
9474 break;
9476 return NULL_TREE;
9479 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9480 This function returns NULL_TREE if no simplification was possible. */
9482 static tree
9483 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9485 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9486 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9488 if (TREE_CODE (arg0) == ERROR_MARK)
9489 return NULL_TREE;
9491 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9492 return ret;
9494 switch (fcode)
9496 case BUILT_IN_CONSTANT_P:
9498 tree val = fold_builtin_constant_p (arg0);
9500 /* Gimplification will pull the CALL_EXPR for the builtin out of
9501 an if condition. When not optimizing, we'll not CSE it back.
9502 To avoid link-error regressions, return false now. */
9503 if (!val && !optimize)
9504 val = integer_zero_node;
9506 return val;
9509 case BUILT_IN_CLASSIFY_TYPE:
9510 return fold_builtin_classify_type (arg0);
9512 case BUILT_IN_STRLEN:
9513 return fold_builtin_strlen (loc, type, arg0);
9515 CASE_FLT_FN (BUILT_IN_FABS):
9516 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9517 case BUILT_IN_FABSD32:
9518 case BUILT_IN_FABSD64:
9519 case BUILT_IN_FABSD128:
9520 return fold_builtin_fabs (loc, arg0, type);
9522 case BUILT_IN_ABS:
9523 case BUILT_IN_LABS:
9524 case BUILT_IN_LLABS:
9525 case BUILT_IN_IMAXABS:
9526 return fold_builtin_abs (loc, arg0, type);
9528 CASE_FLT_FN (BUILT_IN_CONJ):
9529 if (validate_arg (arg0, COMPLEX_TYPE)
9530 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9531 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9532 break;
9534 CASE_FLT_FN (BUILT_IN_CREAL):
9535 if (validate_arg (arg0, COMPLEX_TYPE)
9536 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9537 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9538 break;
9540 CASE_FLT_FN (BUILT_IN_CIMAG):
9541 if (validate_arg (arg0, COMPLEX_TYPE)
9542 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9543 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9544 break;
9546 CASE_FLT_FN (BUILT_IN_CARG):
9547 return fold_builtin_carg (loc, arg0, type);
9549 case BUILT_IN_ISASCII:
9550 return fold_builtin_isascii (loc, arg0);
9552 case BUILT_IN_TOASCII:
9553 return fold_builtin_toascii (loc, arg0);
9555 case BUILT_IN_ISDIGIT:
9556 return fold_builtin_isdigit (loc, arg0);
9558 CASE_FLT_FN (BUILT_IN_FINITE):
9559 case BUILT_IN_FINITED32:
9560 case BUILT_IN_FINITED64:
9561 case BUILT_IN_FINITED128:
9562 case BUILT_IN_ISFINITE:
9564 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9565 if (ret)
9566 return ret;
9567 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9570 CASE_FLT_FN (BUILT_IN_ISINF):
9571 case BUILT_IN_ISINFD32:
9572 case BUILT_IN_ISINFD64:
9573 case BUILT_IN_ISINFD128:
9575 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9576 if (ret)
9577 return ret;
9578 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9581 case BUILT_IN_ISNORMAL:
9582 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9584 case BUILT_IN_ISINF_SIGN:
9585 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9587 CASE_FLT_FN (BUILT_IN_ISNAN):
9588 case BUILT_IN_ISNAND32:
9589 case BUILT_IN_ISNAND64:
9590 case BUILT_IN_ISNAND128:
9591 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9593 case BUILT_IN_FREE:
9594 if (integer_zerop (arg0))
9595 return build_empty_stmt (loc);
9596 break;
9598 default:
9599 break;
9602 return NULL_TREE;
9606 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9607 This function returns NULL_TREE if no simplification was possible. */
9609 static tree
9610 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9612 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9613 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9615 if (TREE_CODE (arg0) == ERROR_MARK
9616 || TREE_CODE (arg1) == ERROR_MARK)
9617 return NULL_TREE;
9619 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9620 return ret;
9622 switch (fcode)
9624 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9625 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9626 if (validate_arg (arg0, REAL_TYPE)
9627 && validate_arg (arg1, POINTER_TYPE))
9628 return do_mpfr_lgamma_r (arg0, arg1, type);
9629 break;
9631 CASE_FLT_FN (BUILT_IN_FREXP):
9632 return fold_builtin_frexp (loc, arg0, arg1, type);
9634 CASE_FLT_FN (BUILT_IN_MODF):
9635 return fold_builtin_modf (loc, arg0, arg1, type);
9637 case BUILT_IN_STRSPN:
9638 return fold_builtin_strspn (loc, arg0, arg1);
9640 case BUILT_IN_STRCSPN:
9641 return fold_builtin_strcspn (loc, arg0, arg1);
9643 case BUILT_IN_STRPBRK:
9644 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9646 case BUILT_IN_EXPECT:
9647 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9649 case BUILT_IN_ISGREATER:
9650 return fold_builtin_unordered_cmp (loc, fndecl,
9651 arg0, arg1, UNLE_EXPR, LE_EXPR);
9652 case BUILT_IN_ISGREATEREQUAL:
9653 return fold_builtin_unordered_cmp (loc, fndecl,
9654 arg0, arg1, UNLT_EXPR, LT_EXPR);
9655 case BUILT_IN_ISLESS:
9656 return fold_builtin_unordered_cmp (loc, fndecl,
9657 arg0, arg1, UNGE_EXPR, GE_EXPR);
9658 case BUILT_IN_ISLESSEQUAL:
9659 return fold_builtin_unordered_cmp (loc, fndecl,
9660 arg0, arg1, UNGT_EXPR, GT_EXPR);
9661 case BUILT_IN_ISLESSGREATER:
9662 return fold_builtin_unordered_cmp (loc, fndecl,
9663 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9664 case BUILT_IN_ISUNORDERED:
9665 return fold_builtin_unordered_cmp (loc, fndecl,
9666 arg0, arg1, UNORDERED_EXPR,
9667 NOP_EXPR);
9669 /* We do the folding for va_start in the expander. */
9670 case BUILT_IN_VA_START:
9671 break;
9673 case BUILT_IN_OBJECT_SIZE:
9674 return fold_builtin_object_size (arg0, arg1);
9676 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9677 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9679 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9680 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9682 default:
9683 break;
9685 return NULL_TREE;
9688 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9689 and ARG2.
9690 This function returns NULL_TREE if no simplification was possible. */
9692 static tree
9693 fold_builtin_3 (location_t loc, tree fndecl,
9694 tree arg0, tree arg1, tree arg2)
9696 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9697 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9699 if (TREE_CODE (arg0) == ERROR_MARK
9700 || TREE_CODE (arg1) == ERROR_MARK
9701 || TREE_CODE (arg2) == ERROR_MARK)
9702 return NULL_TREE;
9704 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9705 arg0, arg1, arg2))
9706 return ret;
9708 switch (fcode)
9711 CASE_FLT_FN (BUILT_IN_SINCOS):
9712 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9714 CASE_FLT_FN (BUILT_IN_REMQUO):
9715 if (validate_arg (arg0, REAL_TYPE)
9716 && validate_arg (arg1, REAL_TYPE)
9717 && validate_arg (arg2, POINTER_TYPE))
9718 return do_mpfr_remquo (arg0, arg1, arg2);
9719 break;
9721 case BUILT_IN_MEMCMP:
9722 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9724 case BUILT_IN_EXPECT:
9725 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9727 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9728 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9730 case BUILT_IN_ADD_OVERFLOW:
9731 case BUILT_IN_SUB_OVERFLOW:
9732 case BUILT_IN_MUL_OVERFLOW:
9733 case BUILT_IN_ADD_OVERFLOW_P:
9734 case BUILT_IN_SUB_OVERFLOW_P:
9735 case BUILT_IN_MUL_OVERFLOW_P:
9736 case BUILT_IN_SADD_OVERFLOW:
9737 case BUILT_IN_SADDL_OVERFLOW:
9738 case BUILT_IN_SADDLL_OVERFLOW:
9739 case BUILT_IN_SSUB_OVERFLOW:
9740 case BUILT_IN_SSUBL_OVERFLOW:
9741 case BUILT_IN_SSUBLL_OVERFLOW:
9742 case BUILT_IN_SMUL_OVERFLOW:
9743 case BUILT_IN_SMULL_OVERFLOW:
9744 case BUILT_IN_SMULLL_OVERFLOW:
9745 case BUILT_IN_UADD_OVERFLOW:
9746 case BUILT_IN_UADDL_OVERFLOW:
9747 case BUILT_IN_UADDLL_OVERFLOW:
9748 case BUILT_IN_USUB_OVERFLOW:
9749 case BUILT_IN_USUBL_OVERFLOW:
9750 case BUILT_IN_USUBLL_OVERFLOW:
9751 case BUILT_IN_UMUL_OVERFLOW:
9752 case BUILT_IN_UMULL_OVERFLOW:
9753 case BUILT_IN_UMULLL_OVERFLOW:
9754 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9756 default:
9757 break;
9759 return NULL_TREE;
9762 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9763 arguments. IGNORE is true if the result of the
9764 function call is ignored. This function returns NULL_TREE if no
9765 simplification was possible. */
9767 tree
9768 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9770 tree ret = NULL_TREE;
9772 switch (nargs)
9774 case 0:
9775 ret = fold_builtin_0 (loc, fndecl);
9776 break;
9777 case 1:
9778 ret = fold_builtin_1 (loc, fndecl, args[0]);
9779 break;
9780 case 2:
9781 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9782 break;
9783 case 3:
9784 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9785 break;
9786 default:
9787 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9788 break;
9790 if (ret)
9792 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9793 SET_EXPR_LOCATION (ret, loc);
9794 return ret;
9796 return NULL_TREE;
9799 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9800 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9801 of arguments in ARGS to be omitted. OLDNARGS is the number of
9802 elements in ARGS. */
9804 static tree
9805 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9806 int skip, tree fndecl, int n, va_list newargs)
9808 int nargs = oldnargs - skip + n;
9809 tree *buffer;
9811 if (n > 0)
9813 int i, j;
9815 buffer = XALLOCAVEC (tree, nargs);
9816 for (i = 0; i < n; i++)
9817 buffer[i] = va_arg (newargs, tree);
9818 for (j = skip; j < oldnargs; j++, i++)
9819 buffer[i] = args[j];
9821 else
9822 buffer = args + skip;
9824 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9827 /* Return true if FNDECL shouldn't be folded right now.
9828 If a built-in function has an inline attribute always_inline
9829 wrapper, defer folding it until after always_inline functions have
9830 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9831 might not be performed. */
9833 bool
9834 avoid_folding_inline_builtin (tree fndecl)
9836 return (DECL_DECLARED_INLINE_P (fndecl)
9837 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9838 && cfun
9839 && !cfun->always_inline_functions_inlined
9840 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9843 /* A wrapper function for builtin folding that prevents warnings for
9844 "statement without effect" and the like, caused by removing the
9845 call node earlier than the warning is generated. */
9847 tree
9848 fold_call_expr (location_t loc, tree exp, bool ignore)
9850 tree ret = NULL_TREE;
9851 tree fndecl = get_callee_fndecl (exp);
9852 if (fndecl && fndecl_built_in_p (fndecl)
9853 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9854 yet. Defer folding until we see all the arguments
9855 (after inlining). */
9856 && !CALL_EXPR_VA_ARG_PACK (exp))
9858 int nargs = call_expr_nargs (exp);
9860 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9861 instead last argument is __builtin_va_arg_pack (). Defer folding
9862 even in that case, until arguments are finalized. */
9863 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9865 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9866 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9867 return NULL_TREE;
9870 if (avoid_folding_inline_builtin (fndecl))
9871 return NULL_TREE;
9873 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9874 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9875 CALL_EXPR_ARGP (exp), ignore);
9876 else
9878 tree *args = CALL_EXPR_ARGP (exp);
9879 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9880 if (ret)
9881 return ret;
9884 return NULL_TREE;
9887 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9888 N arguments are passed in the array ARGARRAY. Return a folded
9889 expression or NULL_TREE if no simplification was possible. */
9891 tree
9892 fold_builtin_call_array (location_t loc, tree,
9893 tree fn,
9894 int n,
9895 tree *argarray)
9897 if (TREE_CODE (fn) != ADDR_EXPR)
9898 return NULL_TREE;
9900 tree fndecl = TREE_OPERAND (fn, 0);
9901 if (TREE_CODE (fndecl) == FUNCTION_DECL
9902 && fndecl_built_in_p (fndecl))
9904 /* If last argument is __builtin_va_arg_pack (), arguments to this
9905 function are not finalized yet. Defer folding until they are. */
9906 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9908 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9909 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9910 return NULL_TREE;
9912 if (avoid_folding_inline_builtin (fndecl))
9913 return NULL_TREE;
9914 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9915 return targetm.fold_builtin (fndecl, n, argarray, false);
9916 else
9917 return fold_builtin_n (loc, fndecl, argarray, n, false);
9920 return NULL_TREE;
9923 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9924 along with N new arguments specified as the "..." parameters. SKIP
9925 is the number of arguments in EXP to be omitted. This function is used
9926 to do varargs-to-varargs transformations. */
9928 static tree
9929 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9931 va_list ap;
9932 tree t;
9934 va_start (ap, n);
9935 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9936 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9937 va_end (ap);
9939 return t;
9942 /* Validate a single argument ARG against a tree code CODE representing
9943 a type. Return true when argument is valid. */
9945 static bool
9946 validate_arg (const_tree arg, enum tree_code code)
9948 if (!arg)
9949 return false;
9950 else if (code == POINTER_TYPE)
9951 return POINTER_TYPE_P (TREE_TYPE (arg));
9952 else if (code == INTEGER_TYPE)
9953 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9954 return code == TREE_CODE (TREE_TYPE (arg));
9957 /* This function validates the types of a function call argument list
9958 against a specified list of tree_codes. If the last specifier is a 0,
9959 that represents an ellipsis; otherwise the last specifier must be a
9960 VOID_TYPE.
9962 This is the GIMPLE version of validate_arglist. Eventually we want to
9963 completely convert builtins.c to work from GIMPLEs and the tree based
9964 validate_arglist will then be removed. */
9966 bool
9967 validate_gimple_arglist (const gcall *call, ...)
9969 enum tree_code code;
9970 bool res = false;
9971 va_list ap;
9972 const_tree arg;
9973 size_t i;
9975 va_start (ap, call);
9976 i = 0;
9978 do
9980 code = (enum tree_code) va_arg (ap, int);
9981 switch (code)
9983 case 0:
9984 /* This signifies an ellipsis; any further arguments are ok. */
9985 res = true;
9986 goto end;
9987 case VOID_TYPE:
9988 /* This signifies an endlink, if no arguments remain, return
9989 true, otherwise return false. */
9990 res = (i == gimple_call_num_args (call));
9991 goto end;
9992 default:
9993 /* If no parameters remain or the parameter's code does not
9994 match the specified code, return false. Otherwise continue
9995 checking any remaining arguments. */
9996 arg = gimple_call_arg (call, i++);
9997 if (!validate_arg (arg, code))
9998 goto end;
9999 break;
10002 while (1);
10004 /* We need gotos here since we can only have one VA_CLOSE in a
10005 function. */
10006 end: ;
10007 va_end (ap);
10009 return res;
10012 /* Default target-specific builtin expander that does nothing. */
10014 rtx
10015 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10016 rtx target ATTRIBUTE_UNUSED,
10017 rtx subtarget ATTRIBUTE_UNUSED,
10018 machine_mode mode ATTRIBUTE_UNUSED,
10019 int ignore ATTRIBUTE_UNUSED)
10021 return NULL_RTX;
10024 /* Returns true if EXP represents data that would potentially reside
10025 in a readonly section. */
10027 bool
10028 readonly_data_expr (tree exp)
10030 STRIP_NOPS (exp);
10032 if (TREE_CODE (exp) != ADDR_EXPR)
10033 return false;
10035 exp = get_base_address (TREE_OPERAND (exp, 0));
10036 if (!exp)
10037 return false;
10039 /* Make sure we call decl_readonly_section only for trees it
10040 can handle (since it returns true for everything it doesn't
10041 understand). */
10042 if (TREE_CODE (exp) == STRING_CST
10043 || TREE_CODE (exp) == CONSTRUCTOR
10044 || (VAR_P (exp) && TREE_STATIC (exp)))
10045 return decl_readonly_section (exp, 0);
10046 else
10047 return false;
10050 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10051 to the call, and TYPE is its return type.
10053 Return NULL_TREE if no simplification was possible, otherwise return the
10054 simplified form of the call as a tree.
10056 The simplified form may be a constant or other expression which
10057 computes the same value, but in a more efficient manner (including
10058 calls to other builtin functions).
10060 The call may contain arguments which need to be evaluated, but
10061 which are not useful to determine the result of the call. In
10062 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10063 COMPOUND_EXPR will be an argument which must be evaluated.
10064 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10065 COMPOUND_EXPR in the chain will contain the tree for the simplified
10066 form of the builtin function call. */
10068 static tree
10069 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10071 if (!validate_arg (s1, POINTER_TYPE)
10072 || !validate_arg (s2, POINTER_TYPE))
10073 return NULL_TREE;
10074 else
10076 tree fn;
10077 const char *p1, *p2;
10079 p2 = c_getstr (s2);
10080 if (p2 == NULL)
10081 return NULL_TREE;
10083 p1 = c_getstr (s1);
10084 if (p1 != NULL)
10086 const char *r = strpbrk (p1, p2);
10087 tree tem;
10089 if (r == NULL)
10090 return build_int_cst (TREE_TYPE (s1), 0);
10092 /* Return an offset into the constant string argument. */
10093 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10094 return fold_convert_loc (loc, type, tem);
10097 if (p2[0] == '\0')
10098 /* strpbrk(x, "") == NULL.
10099 Evaluate and ignore s1 in case it had side-effects. */
10100 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10102 if (p2[1] != '\0')
10103 return NULL_TREE; /* Really call strpbrk. */
10105 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10106 if (!fn)
10107 return NULL_TREE;
10109 /* New argument list transforming strpbrk(s1, s2) to
10110 strchr(s1, s2[0]). */
10111 return build_call_expr_loc (loc, fn, 2, s1,
10112 build_int_cst (integer_type_node, p2[0]));
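/* Editorial sketch: the two non-constant strpbrk cases folded above.
   An empty accept set can never match, and a one-character set
   degenerates to strchr.  */
#include <string.h>
#include <assert.h>

static void
check_strpbrk_folds (const char *s)
{
  assert (strpbrk (s, "") == NULL);
  assert (strpbrk (s, "/") == strchr (s, '/'));
}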
10116 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10117 to the call.
10119 Return NULL_TREE if no simplification was possible, otherwise return the
10120 simplified form of the call as a tree.
10122 The simplified form may be a constant or other expression which
10123 computes the same value, but in a more efficient manner (including
10124 calls to other builtin functions).
10126 The call may contain arguments which need to be evaluated, but
10127 which are not useful to determine the result of the call. In
10128 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10129 COMPOUND_EXPR will be an argument which must be evaluated.
10130 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10131 COMPOUND_EXPR in the chain will contain the tree for the simplified
10132 form of the builtin function call. */
10134 static tree
10135 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10137 if (!validate_arg (s1, POINTER_TYPE)
10138 || !validate_arg (s2, POINTER_TYPE))
10139 return NULL_TREE;
10140 else
10142 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10144 /* If either argument is "", the result is 0. */
10145 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10146 /* Evaluate and ignore both arguments in case either one has
10147 side-effects. */
10148 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10149 s1, s2);
10150 return NULL_TREE;
10154 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10155 to the call.
10157 Return NULL_TREE if no simplification was possible, otherwise return the
10158 simplified form of the call as a tree.
10160 The simplified form may be a constant or other expression which
10161 computes the same value, but in a more efficient manner (including
10162 calls to other builtin functions).
10164 The call may contain arguments which need to be evaluated, but
10165 which are not useful to determine the result of the call. In
10166 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10167 COMPOUND_EXPR will be an argument which must be evaluated.
10168 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10169 COMPOUND_EXPR in the chain will contain the tree for the simplified
10170 form of the builtin function call. */
10172 static tree
10173 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10175 if (!validate_arg (s1, POINTER_TYPE)
10176 || !validate_arg (s2, POINTER_TYPE))
10177 return NULL_TREE;
10178 else
10180 /* If the first argument is "", the result is 0. */
10181 const char *p1 = c_getstr (s1);
10182 if (p1 && *p1 == '\0')
10184 /* Evaluate and ignore argument s2 in case it has
10185 side-effects. */
10186 return omit_one_operand_loc (loc, size_type_node,
10187 size_zero_node, s2);
10190 /* If the second argument is "", return __builtin_strlen(s1). */
10191 const char *p2 = c_getstr (s2);
10192 if (p2 && *p2 == '\0')
10194 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10196 /* If the replacement _DECL isn't initialized, don't do the
10197 transformation. */
10198 if (!fn)
10199 return NULL_TREE;
10201 return build_call_expr_loc (loc, fn, 1, s1);
10203 return NULL_TREE;
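/* Editorial sketch: the strspn/strcspn special cases handled above,
   restated against the library semantics.  */
#include <string.h>
#include <assert.h>

static void
check_spn_folds (const char *s)
{
  assert (strspn ("", s) == 0);
  assert (strcspn ("", s) == 0);
  assert (strcspn (s, "") == strlen (s));
}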
10207 /* Fold the next_arg or va_start call EXP. Return true if an error was
10208 produced, false otherwise. This is done so that we don't output the error
10209 or warning twice or three times. */
10211 bool
10212 fold_builtin_next_arg (tree exp, bool va_start_p)
10214 tree fntype = TREE_TYPE (current_function_decl);
10215 int nargs = call_expr_nargs (exp);
10216 tree arg;
10217 /* There is a good chance the current input_location points inside the
10218 definition of the va_start macro (perhaps on the token for
10219 the builtin) in a system header, so warnings will not be emitted.
10220 Use the location in real source code instead. */
10221 location_t current_location =
10222 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10223 NULL);
10225 if (!stdarg_p (fntype))
10227 error ("%<va_start%> used in function with fixed args");
10228 return true;
10231 if (va_start_p)
10233 if (va_start_p && (nargs != 2))
10235 error ("wrong number of arguments to function %<va_start%>");
10236 return true;
10238 arg = CALL_EXPR_ARG (exp, 1);
10240 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10241 once we have checked the arguments and, if needed, issued a warning. */
10242 else
10244 if (nargs == 0)
10246 /* Evidently an out of date version of <stdarg.h>; can't validate
10247 va_start's second argument, but can still work as intended. */
10248 warning_at (current_location,
10249 OPT_Wvarargs,
10250 "%<__builtin_next_arg%> called without an argument");
10251 return true;
10253 else if (nargs > 1)
10255 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10256 return true;
10258 arg = CALL_EXPR_ARG (exp, 0);
10261 if (TREE_CODE (arg) == SSA_NAME)
10262 arg = SSA_NAME_VAR (arg);
10264 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10265 or __builtin_next_arg (0) the first time we see it, after checking
10266 the arguments and if needed issuing a warning. */
10267 if (!integer_zerop (arg))
10269 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10271 /* Strip off all nops for the sake of the comparison. This
10272 is not quite the same as STRIP_NOPS. It does more.
10273 We must also strip off INDIRECT_EXPR for C++ reference
10274 parameters. */
10275 while (CONVERT_EXPR_P (arg)
10276 || TREE_CODE (arg) == INDIRECT_REF)
10277 arg = TREE_OPERAND (arg, 0);
10278 if (arg != last_parm)
10280 /* FIXME: Sometimes the tree optimizers hand us something other
10281 than the last argument even though the user used the last
10282 argument. We just warn and set the arg to be the last
10283 argument, so we will generate wrong code because of
10284 it. */
10285 warning_at (current_location,
10286 OPT_Wvarargs,
10287 "second parameter of %<va_start%> not last named argument");
10290 /* Undefined by C99 7.15.1.4p4 (va_start):
10291 "If the parameter parmN is declared with the register storage
10292 class, with a function or array type, or with a type that is
10293 not compatible with the type that results after application of
10294 the default argument promotions, the behavior is undefined."
10296 else if (DECL_REGISTER (arg))
10298 warning_at (current_location,
10299 OPT_Wvarargs,
10300 "undefined behavior when second parameter of "
10301 "%<va_start%> is declared with %<register%> storage");
10304 /* We want to verify the second parameter just once before the tree
10305 optimizers are run and then avoid keeping it in the tree,
10306 as otherwise we could warn even for correct code like:
10307 void foo (int i, ...)
10308 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10309 if (va_start_p)
10310 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10311 else
10312 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10314 return false;
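/* Editorial sketch: the usage the checks above validate.  Passing
   anything other than the last named parameter to va_start draws the
   -Wvarargs warning emitted above, and calling va_start in a
   fixed-argument function is a hard error.  */
#include <stdarg.h>

static int
sum (int n, ...)
{
  va_list ap;
  int s = 0;

  va_start (ap, n);		/* OK: n is the last named parameter.  */
  for (int i = 0; i < n; i++)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}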
10318 /* Expand a call EXP to __builtin_object_size. */
10320 static rtx
10321 expand_builtin_object_size (tree exp)
10323 tree ost;
10324 int object_size_type;
10325 tree fndecl = get_callee_fndecl (exp);
10327 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10329 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10330 exp, fndecl);
10331 expand_builtin_trap ();
10332 return const0_rtx;
10335 ost = CALL_EXPR_ARG (exp, 1);
10336 STRIP_NOPS (ost);
10338 if (TREE_CODE (ost) != INTEGER_CST
10339 || tree_int_cst_sgn (ost) < 0
10340 || compare_tree_int (ost, 3) > 0)
10342 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10343 exp, fndecl);
10344 expand_builtin_trap ();
10345 return const0_rtx;
10348 object_size_type = tree_to_shwi (ost);
10350 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10353 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10354 FCODE is the BUILT_IN_* to use.
10355 Return NULL_RTX if we failed; the caller should emit a normal call,
10356 otherwise try to get the result in TARGET, if convenient (and in
10357 mode MODE if that's convenient). */
10359 static rtx
10360 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10361 enum built_in_function fcode)
10363 if (!validate_arglist (exp,
10364 POINTER_TYPE,
10365 fcode == BUILT_IN_MEMSET_CHK
10366 ? INTEGER_TYPE : POINTER_TYPE,
10367 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10368 return NULL_RTX;
10370 tree dest = CALL_EXPR_ARG (exp, 0);
10371 tree src = CALL_EXPR_ARG (exp, 1);
10372 tree len = CALL_EXPR_ARG (exp, 2);
10373 tree size = CALL_EXPR_ARG (exp, 3);
10375 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10376 /*str=*/NULL_TREE, size);
10378 if (!tree_fits_uhwi_p (size))
10379 return NULL_RTX;
10381 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10383 /* Avoid transforming the checking call to an ordinary one when
10384 an overflow has been detected or when the call couldn't be
10385 validated because the size is not constant. */
10386 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10387 return NULL_RTX;
10389 tree fn = NULL_TREE;
10390 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10391 mem{cpy,pcpy,move,set} is available. */
10392 switch (fcode)
10394 case BUILT_IN_MEMCPY_CHK:
10395 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10396 break;
10397 case BUILT_IN_MEMPCPY_CHK:
10398 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10399 break;
10400 case BUILT_IN_MEMMOVE_CHK:
10401 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10402 break;
10403 case BUILT_IN_MEMSET_CHK:
10404 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10405 break;
10406 default:
10407 break;
10410 if (! fn)
10411 return NULL_RTX;
10413 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10414 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10415 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10416 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10418 else if (fcode == BUILT_IN_MEMSET_CHK)
10419 return NULL_RTX;
10420 else
10422 unsigned int dest_align = get_pointer_alignment (dest);
10424 /* If DEST is not a pointer type, call the normal function. */
10425 if (dest_align == 0)
10426 return NULL_RTX;
10428 /* If SRC and DEST are the same (and not volatile), do nothing. */
10429 if (operand_equal_p (src, dest, 0))
10431 tree expr;
10433 if (fcode != BUILT_IN_MEMPCPY_CHK)
10435 /* Evaluate and ignore LEN in case it has side-effects. */
10436 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10437 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10440 expr = fold_build_pointer_plus (dest, len);
10441 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10444 /* __memmove_chk special case. */
10445 if (fcode == BUILT_IN_MEMMOVE_CHK)
10447 unsigned int src_align = get_pointer_alignment (src);
10449 if (src_align == 0)
10450 return NULL_RTX;
10452 /* If src is categorized for a readonly section we can use
10453 normal __memcpy_chk. */
10454 if (readonly_data_expr (src))
10456 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10457 if (!fn)
10458 return NULL_RTX;
10459 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10460 dest, src, len, size);
10461 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10462 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10463 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10466 return NULL_RTX;
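/* Editorial sketch: the source-level effect of the rewrite above.
   When the length is a known constant that provably fits the
   destination, the checking builtin decays to the ordinary call; on a
   detected overflow it is left as-is so the runtime check (and the
   compile-time warning) survive.  */
static void
copy8 (char *dst, const char *src)
{
  __builtin___memcpy_chk (dst, src, 8, __builtin_object_size (dst, 0));
}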
10470 /* Emit warning if a buffer overflow is detected at compile time. */
10472 static void
10473 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10475 /* The source string. */
10476 tree srcstr = NULL_TREE;
10477 /* The size of the destination object. */
10478 tree objsize = NULL_TREE;
10479 /* The string that is being concatenated with (as in __strcat_chk)
10480 or null if it isn't. */
10481 tree catstr = NULL_TREE;
10482 /* The maximum length of the source sequence in a bounded operation
10483 (such as __strncat_chk) or null if the operation isn't bounded
10484 (such as __strcat_chk). */
10485 tree maxread = NULL_TREE;
10486 /* The exact size of the access (such as in __strncpy_chk). */
10487 tree size = NULL_TREE;
10489 switch (fcode)
10491 case BUILT_IN_STRCPY_CHK:
10492 case BUILT_IN_STPCPY_CHK:
10493 srcstr = CALL_EXPR_ARG (exp, 1);
10494 objsize = CALL_EXPR_ARG (exp, 2);
10495 break;
10497 case BUILT_IN_STRCAT_CHK:
10498 /* For __strcat_chk the warning will be emitted only if overflowing
10499 by at least strlen (dest) + 1 bytes. */
10500 catstr = CALL_EXPR_ARG (exp, 0);
10501 srcstr = CALL_EXPR_ARG (exp, 1);
10502 objsize = CALL_EXPR_ARG (exp, 2);
10503 break;
10505 case BUILT_IN_STRNCAT_CHK:
10506 catstr = CALL_EXPR_ARG (exp, 0);
10507 srcstr = CALL_EXPR_ARG (exp, 1);
10508 maxread = CALL_EXPR_ARG (exp, 2);
10509 objsize = CALL_EXPR_ARG (exp, 3);
10510 break;
10512 case BUILT_IN_STRNCPY_CHK:
10513 case BUILT_IN_STPNCPY_CHK:
10514 srcstr = CALL_EXPR_ARG (exp, 1);
10515 size = CALL_EXPR_ARG (exp, 2);
10516 objsize = CALL_EXPR_ARG (exp, 3);
10517 break;
10519 case BUILT_IN_SNPRINTF_CHK:
10520 case BUILT_IN_VSNPRINTF_CHK:
10521 maxread = CALL_EXPR_ARG (exp, 1);
10522 objsize = CALL_EXPR_ARG (exp, 3);
10523 break;
10524 default:
10525 gcc_unreachable ();
10528 if (catstr && maxread)
10530 /* Check __strncat_chk. There is no way to determine the length
10531 of the string to which the source string is being appended so
10532 just warn when the length of the source string is not known. */
10533 check_strncat_sizes (exp, objsize);
10534 return;
10537 /* The destination argument is the first one for all built-ins above. */
10538 tree dst = CALL_EXPR_ARG (exp, 0);
10540 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10543 /* Emit warning if a buffer overflow is detected at compile time
10544 in __sprintf_chk/__vsprintf_chk calls. */
10546 static void
10547 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10549 tree size, len, fmt;
10550 const char *fmt_str;
10551 int nargs = call_expr_nargs (exp);
10553 /* Verify the required arguments in the original call. */
10555 if (nargs < 4)
10556 return;
10557 size = CALL_EXPR_ARG (exp, 2);
10558 fmt = CALL_EXPR_ARG (exp, 3);
10560 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10561 return;
10563 /* Check whether the format is a literal string constant. */
10564 fmt_str = c_getstr (fmt);
10565 if (fmt_str == NULL)
10566 return;
10568 if (!init_target_chars ())
10569 return;
10571 /* If the format doesn't contain % args or %%, we know its size. */
10572 if (strchr (fmt_str, target_percent) == 0)
10573 len = build_int_cstu (size_type_node, strlen (fmt_str));
10574 /* If the format is "%s" and first ... argument is a string literal,
10575 we know it too. */
10576 else if (fcode == BUILT_IN_SPRINTF_CHK
10577 && strcmp (fmt_str, target_percent_s) == 0)
10579 tree arg;
10581 if (nargs < 5)
10582 return;
10583 arg = CALL_EXPR_ARG (exp, 4);
10584 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10585 return;
10587 len = c_strlen (arg, 1);
10588 if (!len || ! tree_fits_uhwi_p (len))
10589 return;
10591 else
10592 return;
10594 /* Add one for the terminating nul. */
10595 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10597 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10598 /*maxread=*/NULL_TREE, len, size);
10601 /* Emit warning if a free is called with address of a variable. */
10603 static void
10604 maybe_emit_free_warning (tree exp)
10606 if (call_expr_nargs (exp) != 1)
10607 return;
10609 tree arg = CALL_EXPR_ARG (exp, 0);
10611 STRIP_NOPS (arg);
10612 if (TREE_CODE (arg) != ADDR_EXPR)
10613 return;
10615 arg = get_base_address (TREE_OPERAND (arg, 0));
10616 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10617 return;
10619 if (SSA_VAR_P (arg))
10620 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10621 "%Kattempt to free a non-heap object %qD", exp, arg);
10622 else
10623 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10624 "%Kattempt to free a non-heap object", exp);
10627 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10628 if possible. */
10630 static tree
10631 fold_builtin_object_size (tree ptr, tree ost)
10633 unsigned HOST_WIDE_INT bytes;
10634 int object_size_type;
10636 if (!validate_arg (ptr, POINTER_TYPE)
10637 || !validate_arg (ost, INTEGER_TYPE))
10638 return NULL_TREE;
10640 STRIP_NOPS (ost);
10642 if (TREE_CODE (ost) != INTEGER_CST
10643 || tree_int_cst_sgn (ost) < 0
10644 || compare_tree_int (ost, 3) > 0)
10645 return NULL_TREE;
10647 object_size_type = tree_to_shwi (ost);
10649 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10650 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10651 and (size_t) 0 for types 2 and 3. */
10652 if (TREE_SIDE_EFFECTS (ptr))
10653 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10655 if (TREE_CODE (ptr) == ADDR_EXPR)
10657 compute_builtin_object_size (ptr, object_size_type, &bytes);
10658 if (wi::fits_to_tree_p (bytes, size_type_node))
10659 return build_int_cstu (size_type_node, bytes);
10661 else if (TREE_CODE (ptr) == SSA_NAME)
10663 /* If object size is not known yet, delay folding until
10664 later. Maybe subsequent passes will help determining
10665 it. */
10666 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10667 && wi::fits_to_tree_p (bytes, size_type_node))
10668 return build_int_cstu (size_type_node, bytes);
10671 return NULL_TREE;
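/* Editorial sketch: typical results of the fold above.  For an
   ADDR_EXPR of a known object the size is exact; for an arbitrary
   pointer about which nothing is known, type 0 reports (size_t) -1
   (types 2 and 3 would report 0).  */
#include <stddef.h>
#include <assert.h>

static char buf[16];

static void
check_object_size_fold (char *p)
{
  assert (__builtin_object_size (buf, 0) == sizeof buf);
  assert (__builtin_object_size (p, 0) == (size_t) -1);
}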
10674 /* Builtins with folding operations that operate on "..." arguments
10675 need special handling; we need to store the arguments in a convenient
10676 data structure before attempting any folding. Fortunately there are
10677 only a few builtins that fall into this category. FNDECL is the
10678 function; the NARGS call arguments are passed in the array ARGS. */
10680 static tree
10681 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10683 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10684 tree ret = NULL_TREE;
10686 switch (fcode)
10688 case BUILT_IN_FPCLASSIFY:
10689 ret = fold_builtin_fpclassify (loc, args, nargs);
10690 break;
10692 default:
10693 break;
10695 if (ret)
10697 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10698 SET_EXPR_LOCATION (ret, loc);
10699 TREE_NO_WARNING (ret) = 1;
10700 return ret;
10702 return NULL_TREE;
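/* Illustrative sketch, not part of the original file: the one varargs
   builtin handled above.  The first five arguments supply the values
   to return for each floating-point class.  */
#if 0
static int
fpclassify_example (void)
{
  /* With a constant argument the call folds to one of the five class
     values; 1.0 is normal, so this folds to the third value, 4.  */
  return __builtin_fpclassify (0, 1, 4, 3, 2, 1.0);
}
#endif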
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
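/* Illustrative sketch, not part of the original file: how the arrays
   initialized above are consumed by the format-string folders.
   Keeping them in the target execution charset is what makes the
   comparisons valid for cross compilers (e.g. an EBCDIC target on an
   ASCII host).  FMT here is a hypothetical format argument.  */
#if 0
  const char *fmt_str = c_getstr (fmt);  /* Bytes in the target charset.  */
  if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    /* The format is exactly "%s".  */;
#endif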
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and that no overflow/underflow occurred.  INEXACT is true if M was
   not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that the caller cleared the MPFR flags and
   then calculated M, so that any flag set since then was set by that
   calculation.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
         this checks for overflow/underflow.  If the REAL_VALUE_TYPE is
         zero but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
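/* Illustrative sketch, not part of the original file: the calling
   sequence the comment above assumes, for a hypothetical caller
   folding sin of a constant (RA, PREC, RND and TYPE as in the
   helpers below).  */
#if 0
  mpfr_t m;
  mpfr_init2 (m, prec);
  mpfr_from_real (m, ra, GMP_RNDN);
  mpfr_clear_flags ();                  /* Reset overflow/underflow flags.  */
  int inexact = mpfr_sin (m, m, rnd);   /* May set flags; 0 if exact.  */
  tree result = do_mpfr_ckconv (m, type, inexact);
  mpfr_clear (m);
#endif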
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and that no overflow/underflow occurred.  INEXACT is true if
   M was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that the caller cleared the MPFR flags and
   then calculated M, so that any flag set since then was set by that
   calculation.  Return NULL_TREE if any checks fail; if FORCE_CONVERT
   is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
         this checks for overflow/underflow.  If the REAL_VALUE_TYPE is
         zero but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the integer pointed to by ARG_QUO and return the remainder.  The
   type is taken from the type of ARG0 and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number the target int can hold, leaving one bit for
                 the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
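/* Illustrative sketch, not part of the original file: a call the
   folder above evaluates at compile time.  */
#if 0
static double
remquo_example (void)
{
  int quo;
  /* 5.0 / 3.0 rounds to the nearest integer 2, so the remainder is
     5.0 - 2 * 3.0 = -1.0 and quo is set to 2; the call folds to -1.0
     with the store to quo kept as a side effect.  */
  return __builtin_remquo (5.0, 3.0, &quo);
}
#endif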
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
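/* Illustrative sketch, not part of the original file: lgamma_r folding
   including the signgam store described above.  */
#if 0
static double
lgamma_r_example (void)
{
  int sg;
  /* Gamma(-0.5) = -2*sqrt(pi) is negative, so *sg is set to -1 and
     the call folds to log|Gamma(-0.5)|, roughly 1.2655.  */
  return __builtin_lgamma_r (-0.5, &sg);
}
#endif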
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
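/* Illustrative sketch, not part of the original file: the kind of
   two-argument complex fold that reaches this helper, here assuming
   FUNC == mpc_pow as used for cpow.  */
#if 0
static _Complex double
cpow_example (void)
{
  /* I squared is -1; with both operands constant the power is
     evaluated in MPC and the call folds to the complex constant
     -1.0 + 0.0i.  */
  return __builtin_cpow (1.0i, 2.0);
}
#endif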
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
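/* Illustrative sketch, not part of the original file: the source
   construct that ends up here.  An asm label on a builtin's
   declaration renames its assembler symbol; for ffs the optab libfunc
   must be redirected as well so RTL expansion emits calls to the
   renamed symbol.  */
#if 0
extern int ffs (int) __asm__ ("my_ffs");
#endif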
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
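/* Illustrative sketch, not part of the original file: a caller's view
   of the helper above, e.g. when folding strchr with a constant
   character argument ARG1 (hypothetical name).  The fold is simply
   skipped when the target char does not fit a host char.  */
#if 0
  char c;
  if (target_char_cst_p (arg1, &c))
    /* Fold using the host character C.  */;
#endif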