gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
77 struct target_builtins default_target_builtins;
78 #if SWITCHABLE_TARGET
79 struct target_builtins *this_target_builtins = &default_target_builtins;
80 #endif
82 /* Define the names of the builtin function types and codes. */
83 const char *const built_in_class_names[BUILT_IN_LAST]
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
86 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
87 const char * built_in_names[(int) END_BUILTINS] =
88 {
89 #include "builtins.def"
90 };
92 /* Set up an array of builtin_info_type, making sure each element's decl is
93 initialized to NULL_TREE. */
94 builtin_info_type builtin_info[(int)END_BUILTINS];
96 /* Non-zero if __builtin_constant_p should be folded right away. */
97 bool force_folding_builtin_constant_p;
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx inline_expand_builtin_string_cmp (tree, rtx);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
126 static rtx expand_builtin_memchr (tree, rtx);
127 static rtx expand_builtin_memcpy (tree, rtx);
128 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp,
130 memop_ret retmode);
131 static rtx expand_builtin_memmove (tree, rtx);
132 static rtx expand_builtin_mempcpy (tree, rtx);
133 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
134 static rtx expand_builtin_strcat (tree, rtx);
135 static rtx expand_builtin_strcpy (tree, rtx);
136 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
137 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
138 static rtx expand_builtin_stpncpy (tree, rtx);
139 static rtx expand_builtin_strncat (tree, rtx);
140 static rtx expand_builtin_strncpy (tree, rtx);
141 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
142 static rtx expand_builtin_memset (tree, rtx, machine_mode);
143 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
144 static rtx expand_builtin_bzero (tree);
145 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
146 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_alloca (tree);
148 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
149 static rtx expand_builtin_frame_address (tree, tree);
150 static tree stabilize_va_list_loc (location_t, tree, int);
151 static rtx expand_builtin_expect (tree, rtx);
152 static rtx expand_builtin_expect_with_probability (tree, rtx);
153 static tree fold_builtin_constant_p (tree);
154 static tree fold_builtin_classify_type (tree);
155 static tree fold_builtin_strlen (location_t, tree, tree);
156 static tree fold_builtin_inf (location_t, tree, int);
157 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
162 static tree fold_builtin_isascii (location_t, tree);
163 static tree fold_builtin_toascii (location_t, tree);
164 static tree fold_builtin_isdigit (location_t, tree);
165 static tree fold_builtin_fabs (location_t, tree, tree);
166 static tree fold_builtin_abs (location_t, tree, tree);
167 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
168 enum tree_code);
169 static tree fold_builtin_0 (location_t, tree);
170 static tree fold_builtin_1 (location_t, tree, tree);
171 static tree fold_builtin_2 (location_t, tree, tree, tree);
172 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_varargs (location_t, tree, tree*, int);
175 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
176 static tree fold_builtin_strspn (location_t, tree, tree);
177 static tree fold_builtin_strcspn (location_t, tree, tree);
179 static rtx expand_builtin_object_size (tree);
180 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 enum built_in_function);
182 static void maybe_emit_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_free_warning (tree);
185 static tree fold_builtin_object_size (tree, tree);
187 unsigned HOST_WIDE_INT target_newline;
188 unsigned HOST_WIDE_INT target_percent;
189 static unsigned HOST_WIDE_INT target_c;
190 static unsigned HOST_WIDE_INT target_s;
191 char target_percent_c[3];
192 char target_percent_s[3];
193 char target_percent_s_newline[4];
194 static tree do_mpfr_remquo (tree, tree, tree);
195 static tree do_mpfr_lgamma_r (tree, tree, tree);
196 static void expand_builtin_sync_synchronize (void);
198 /* Return true if NAME starts with __builtin_ or __sync_. */
200 static bool
201 is_builtin_name (const char *name)
203 if (strncmp (name, "__builtin_", 10) == 0)
204 return true;
205 if (strncmp (name, "__sync_", 7) == 0)
206 return true;
207 if (strncmp (name, "__atomic_", 9) == 0)
208 return true;
209 return false;
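/* Illustrative examples of the prefix checks above (sketch, not part of
   the original source):
     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */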
212 /* Return true if NODE should be considered for inline expansion regardless
213 of the optimization level. This means whenever a function is invoked with
214 its "internal" name, which normally contains the prefix "__builtin". */
216 bool
217 called_as_built_in (tree node)
219 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
220 we want the name used to call the function, not the name it
221 will have. */
222 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
223 return is_builtin_name (name);
226 /* Compute values M and N such that M divides (address of EXP - N) and such
227 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
228 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
229 *ALIGNP and any bit-offset to *BITPOSP.
231 Note that the address (and thus the alignment) computed here is based
232 on the address to which a symbol resolves, whereas DECL_ALIGN is based
233 on the address at which an object is actually located. These two
234 addresses are not always the same. For example, on ARM targets,
235 the address &foo of a Thumb function foo() has the lowest bit set,
236 whereas foo() itself starts on an even address.
238 If ADDR_P is true we are taking the address of the memory reference EXP
239 and thus cannot rely on the access taking place. */
241 static bool
242 get_object_alignment_2 (tree exp, unsigned int *alignp,
243 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
245 poly_int64 bitsize, bitpos;
246 tree offset;
247 machine_mode mode;
248 int unsignedp, reversep, volatilep;
249 unsigned int align = BITS_PER_UNIT;
250 bool known_alignment = false;
252 /* Get the innermost object and the constant (bitpos) and possibly
253 variable (offset) offset of the access. */
254 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
255 &unsignedp, &reversep, &volatilep);
257 /* Extract alignment information from the innermost object and
258 possibly adjust bitpos and offset. */
259 if (TREE_CODE (exp) == FUNCTION_DECL)
261 /* Function addresses can encode extra information besides their
262 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
263 allows the low bit to be used as a virtual bit, we know
264 that the address itself must be at least 2-byte aligned. */
265 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
266 align = 2 * BITS_PER_UNIT;
268 else if (TREE_CODE (exp) == LABEL_DECL)
270 else if (TREE_CODE (exp) == CONST_DECL)
272 /* The alignment of a CONST_DECL is determined by its initializer. */
273 exp = DECL_INITIAL (exp);
274 align = TYPE_ALIGN (TREE_TYPE (exp));
275 if (CONSTANT_CLASS_P (exp))
276 align = targetm.constant_alignment (exp, align);
278 known_alignment = true;
280 else if (DECL_P (exp))
282 align = DECL_ALIGN (exp);
283 known_alignment = true;
285 else if (TREE_CODE (exp) == INDIRECT_REF
286 || TREE_CODE (exp) == MEM_REF
287 || TREE_CODE (exp) == TARGET_MEM_REF)
289 tree addr = TREE_OPERAND (exp, 0);
290 unsigned ptr_align;
291 unsigned HOST_WIDE_INT ptr_bitpos;
292 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
294 /* If the address is explicitly aligned, handle that. */
295 if (TREE_CODE (addr) == BIT_AND_EXPR
296 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
298 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
299 ptr_bitmask *= BITS_PER_UNIT;
300 align = least_bit_hwi (ptr_bitmask);
301 addr = TREE_OPERAND (addr, 0);
304 known_alignment
305 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
306 align = MAX (ptr_align, align);
308 /* Re-apply explicit alignment to the bitpos. */
309 ptr_bitpos &= ptr_bitmask;
311 /* The alignment of the pointer operand in a TARGET_MEM_REF
312 has to take the variable offset parts into account. */
313 if (TREE_CODE (exp) == TARGET_MEM_REF)
315 if (TMR_INDEX (exp))
317 unsigned HOST_WIDE_INT step = 1;
318 if (TMR_STEP (exp))
319 step = TREE_INT_CST_LOW (TMR_STEP (exp));
320 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
322 if (TMR_INDEX2 (exp))
323 align = BITS_PER_UNIT;
324 known_alignment = false;
327 /* When EXP is an actual memory reference then we can use
328 TYPE_ALIGN of a pointer indirection to derive alignment.
329 Do so only if get_pointer_alignment_1 did not reveal absolute
330 alignment knowledge and if using that alignment would
331 improve the situation. */
332 unsigned int talign;
333 if (!addr_p && !known_alignment
334 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
335 && talign > align)
336 align = talign;
337 else
339 /* Else adjust bitpos accordingly. */
340 bitpos += ptr_bitpos;
341 if (TREE_CODE (exp) == MEM_REF
342 || TREE_CODE (exp) == TARGET_MEM_REF)
343 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
346 else if (TREE_CODE (exp) == STRING_CST)
348 /* STRING_CSTs are the only constant objects we allow not to be
349 wrapped inside a CONST_DECL. */
350 align = TYPE_ALIGN (TREE_TYPE (exp));
351 if (CONSTANT_CLASS_P (exp))
352 align = targetm.constant_alignment (exp, align);
354 known_alignment = true;
357 /* If there is a non-constant offset part extract the maximum
358 alignment that can prevail. */
359 if (offset)
361 unsigned int trailing_zeros = tree_ctz (offset);
362 if (trailing_zeros < HOST_BITS_PER_INT)
364 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
365 if (inner)
366 align = MIN (align, inner);
370 /* Account for the alignment of runtime coefficients, so that the constant
371 bitpos is guaranteed to be accurate. */
372 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
373 if (alt_align != 0 && alt_align < align)
375 align = alt_align;
376 known_alignment = false;
379 *alignp = align;
380 *bitposp = bitpos.coeffs[0] & (align - 1);
381 return known_alignment;
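/* A worked example of the M/N contract above (illustrative): if the
   address of EXP is known to be 4 bytes past a 16-byte boundary, then
   *ALIGNP is set to 128 (bits) and *BITPOSP to 32, i.e.
     &EXP == 16*k + 4 bytes for some k, with N (32 bits) < M (128 bits).  */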
384 /* For a memory reference expression EXP compute values M and N such that M
385 divides (&EXP - N) and such that N < M. If these numbers can be determined,
386 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
387 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
389 bool
390 get_object_alignment_1 (tree exp, unsigned int *alignp,
391 unsigned HOST_WIDE_INT *bitposp)
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
396 /* Return the alignment in bits of EXP, an object. */
398 unsigned int
399 get_object_alignment (tree exp)
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
404 get_object_alignment_1 (exp, &align, &bitpos);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
409 if (bitpos != 0)
410 align = least_bit_hwi (bitpos);
411 return align;
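/* E.g. (illustrative): with align == 128 and bitpos == 32 from the call
   above, the object is only guaranteed to sit on a 32-bit boundary, and
   least_bit_hwi (32) == 32 is what is returned.  */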
414 /* For a pointer-valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
419 If EXP is not a pointer, false is returned too. */
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
425 STRIP_NOPS (exp);
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
466 /* We cannot really tell whether this result is an approximation. */
467 return false;
469 else
471 *bitposp = 0;
472 *alignp = BITS_PER_UNIT;
473 return false;
476 else if (TREE_CODE (exp) == INTEGER_CST)
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
484 *bitposp = 0;
485 *alignp = BITS_PER_UNIT;
486 return false;
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
496 unsigned int
497 get_pointer_alignment (tree exp)
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
502 get_pointer_alignment_1 (exp, &align, &bitpos);
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
507 if (bitpos != 0)
508 align = least_bit_hwi (bitpos);
510 return align;
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517 unsigned
518 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522 unsigned n;
524 if (eltsize == 1)
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
534 else
536 for (n = 0; n < maxelts; n++)
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
543 return n;
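/* Examples (illustrative):
     string_length ("ab\0cd", 1, 5)  => 2   stops at the embedded NUL
     string_length ("abcde", 1, 3)   => 3   MAXELTS reached, no NUL seen  */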
546 /* For a call at LOC to a function FN that expects a string in the argument
547 ARG, issue a diagnostic due to it being called with an argument
548 declared at DECL that is a character array with no terminating NUL. */
550 void
551 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
553 if (TREE_NO_WARNING (arg))
554 return;
556 loc = expansion_point_location_if_in_system_header (loc);
558 if (warning_at (loc, OPT_Wstringop_overflow_,
559 "%qs argument missing terminating nul", fn))
561 inform (DECL_SOURCE_LOCATION (decl),
562 "referenced argument declared here");
563 TREE_NO_WARNING (arg) = 1;
567 /* If EXP refers to an unterminated constant character array, return
568 the declaration of the object of which the array is a member or
569 element; if SIZE is not null, set *SIZE to the size of
570 the unterminated array and set *EXACT if the size is exact or
571 clear it otherwise. Otherwise return null. */
573 tree
574 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
576 /* C_STRLEN will return NULL and set DECL in the info
577 structure if EXP references an unterminated array. */
578 c_strlen_data lendata = { };
579 tree len = c_strlen (exp, 1, &lendata);
580 if (len == NULL_TREE && lendata.minlen && lendata.decl)
582 if (size)
584 len = lendata.minlen;
585 if (lendata.off)
587 /* Constant offsets are already accounted for in LENDATA.MINLEN,
588 but not in a SSA_NAME + CST expression. */
589 if (TREE_CODE (lendata.off) == INTEGER_CST)
590 *exact = true;
591 else if (TREE_CODE (lendata.off) == PLUS_EXPR
592 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
594 /* Subtract the offset from the size of the array. */
595 *exact = false;
596 tree temp = TREE_OPERAND (lendata.off, 1);
597 temp = fold_convert (ssizetype, temp);
598 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
600 else
601 *exact = false;
603 else
604 *exact = true;
606 *size = len;
608 return lendata.decl;
611 return NULL_TREE;
614 /* Compute the length of a null-terminated character string or wide
615 character string handling character sizes of 1, 2, and 4 bytes.
616 TREE_STRING_LENGTH is not the right way because it evaluates to
617 the size of the character array in bytes (as opposed to characters)
618 and because it can contain a zero byte in the middle.
620 ONLY_VALUE should be nonzero if the result is not going to be emitted
621 into the instruction stream and zero if it is going to be expanded.
622 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
623 is returned, otherwise NULL, since
624 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
625 evaluate the side-effects.
627 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
628 accesses. Note that this implies the result is not going to be emitted
629 into the instruction stream.
631 Additional information about the string accessed may be recorded
632 in DATA. For example, if ARG references an unterminated string,
633 then the declaration will be stored in the DECL field. If the
634 length of the unterminated string can be determined, it'll be
635 stored in the LEN field. Note this length could well be different
636 from what a C strlen call would return.
638 ELTSIZE is 1 for normal single byte character strings, and 2 or
639 4 for wide character strings. ELTSIZE is 1 by default.
641 The value returned is of type `ssizetype'. */
643 tree
644 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
646 /* If we were not passed a DATA pointer, then get one to a local
647 structure. That avoids having to check DATA for NULL before
648 each time we want to use it. */
649 c_strlen_data local_strlen_data = { };
650 if (!data)
651 data = &local_strlen_data;
653 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
655 tree src = STRIP_NOPS (arg);
656 if (TREE_CODE (src) == COND_EXPR
657 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
659 tree len1, len2;
661 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
662 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
663 if (tree_int_cst_equal (len1, len2))
664 return len1;
667 if (TREE_CODE (src) == COMPOUND_EXPR
668 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
669 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
671 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
673 /* Offset from the beginning of the string in bytes. */
674 tree byteoff;
675 tree memsize;
676 tree decl;
677 src = string_constant (src, &byteoff, &memsize, &decl);
678 if (src == 0)
679 return NULL_TREE;
681 /* Determine the size of the string element. */
682 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
683 return NULL_TREE;
685 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
686 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
687 in case the latter is less than the size of the array, such as when
688 SRC refers to a short string literal used to initialize a large array.
689 In that case, the elements of the array after the terminating NUL are
690 all NUL. */
691 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
692 strelts = strelts / eltsize;
694 if (!tree_fits_uhwi_p (memsize))
695 return NULL_TREE;
697 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
699 /* PTR can point to the byte representation of any string type, including
700 char* and wchar_t*. */
701 const char *ptr = TREE_STRING_POINTER (src);
703 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
705 /* The code below works only for single byte character types. */
706 if (eltsize != 1)
707 return NULL_TREE;
709 /* If the string has an internal NUL character followed by any
710 non-NUL characters (e.g., "foo\0bar"), we can't compute
711 the offset to the following NUL if we don't know where to
712 start searching for it. */
713 unsigned len = string_length (ptr, eltsize, strelts);
715 /* Return NULL_TREE when an embedded null character is found or when
716 none is found at all. In the latter case, set the DECL/LEN fields in
717 the DATA structure so that callers may examine them. */
718 if (len + 1 < strelts)
719 return NULL_TREE;
720 else if (len >= maxelts)
722 data->decl = decl;
723 data->off = byteoff;
724 data->minlen = ssize_int (len);
725 return NULL_TREE;
728 /* For empty strings the result should be zero. */
729 if (len == 0)
730 return ssize_int (0);
732 /* We don't know the starting offset, but we do know that the string
733 has no internal zero bytes. If the offset falls within the bounds
734 of the string subtract the offset from the length of the string,
735 and return that. Otherwise the length is zero. Take care to
736 use SAVE_EXPR in case the OFFSET has side-effects. */
737 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
738 : byteoff;
739 offsave = fold_convert_loc (loc, sizetype, offsave);
740 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
741 size_int (len));
742 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
743 offsave);
744 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
745 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
746 build_zero_cst (ssizetype));
749 /* Offset from the beginning of the string in elements. */
750 HOST_WIDE_INT eltoff;
752 /* We have a known offset into the string. Start searching there for
753 a null character if we can represent it as a single HOST_WIDE_INT. */
754 if (byteoff == 0)
755 eltoff = 0;
756 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
757 eltoff = -1;
758 else
759 eltoff = tree_to_uhwi (byteoff) / eltsize;
761 /* If the offset is known to be out of bounds, warn, and call strlen at
762 runtime. */
763 if (eltoff < 0 || eltoff >= maxelts)
765 /* Suppress multiple warnings for propagated constant strings. */
766 if (only_value != 2
767 && !TREE_NO_WARNING (arg)
768 && warning_at (loc, OPT_Warray_bounds,
769 "offset %qwi outside bounds of constant string",
770 eltoff))
772 if (decl)
773 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
774 TREE_NO_WARNING (arg) = 1;
776 return NULL_TREE;
779 /* If eltoff is larger than strelts but less than maxelts the
780 string length is zero, since the excess memory will be zero. */
781 if (eltoff > strelts)
782 return ssize_int (0);
784 /* Use strlen to search for the first zero byte. Since any strings
785 constructed with build_string will have nulls appended, we win even
786 if we get handed something like (char[4])"abcd".
788 Since ELTOFF is our starting index into the string, no further
789 calculation is needed. */
790 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
791 strelts - eltoff);
793 /* Don't know what to return if there was no zero termination.
794 Ideally this would turn into a gcc_checking_assert over time.
795 Set DECL/LEN so callers can examine them. */
796 if (len >= maxelts - eltoff)
798 data->decl = decl;
799 data->off = byteoff;
800 data->minlen = ssize_int (len);
801 return NULL_TREE;
804 return ssize_int (len);
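/* Examples of the constant cases above (illustrative; the offsets are
   shorthand for ARG being the string constant plus a byte offset):
     c_strlen ("hello", 1)        => 5
     c_strlen ("hello" + 2, 1)    => 3          known offset into the string
     c_strlen ("foo\0bar" + i, 1) => NULL_TREE  embedded NUL, offset unknown  */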
807 /* Return a constant integer corresponding to target reading
808 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
809 NULL_TERMINATED_P, reading stops after the first '\0' character and all
810 further ones are assumed to be zero; otherwise it reads as many characters
811 as needed. */
813 rtx
814 c_readstr (const char *str, scalar_int_mode mode,
815 bool null_terminated_p/*=true*/)
817 HOST_WIDE_INT ch;
818 unsigned int i, j;
819 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
821 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
822 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
823 / HOST_BITS_PER_WIDE_INT;
825 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
826 for (i = 0; i < len; i++)
827 tmp[i] = 0;
829 ch = 1;
830 for (i = 0; i < GET_MODE_SIZE (mode); i++)
832 j = i;
833 if (WORDS_BIG_ENDIAN)
834 j = GET_MODE_SIZE (mode) - i - 1;
835 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
836 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
837 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
838 j *= BITS_PER_UNIT;
840 if (ch || !null_terminated_p)
841 ch = (unsigned char) str[i];
842 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
845 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
846 return immed_wide_int_const (c, mode);
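/* For example (illustrative, assuming a little-endian target where
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN == 0): c_readstr ("ab", SImode)
   places 'a' (0x61) in the low byte and 'b' (0x62) above it, and the
   bytes past the NUL read as zero, yielding the constant 0x00006261.  */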
849 /* Cast a target constant CST to target CHAR and if that value fits into
850 the host char type, return zero and store that value in the variable
851 pointed to by P. */
853 static int
854 target_char_cast (tree cst, char *p)
856 unsigned HOST_WIDE_INT val, hostval;
858 if (TREE_CODE (cst) != INTEGER_CST
859 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
860 return 1;
862 /* Do not care if it fits or not right here. */
863 val = TREE_INT_CST_LOW (cst);
865 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
866 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
868 hostval = val;
869 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
870 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
872 if (val != hostval)
873 return 1;
875 *p = hostval;
876 return 0;
879 /* Similar to save_expr, but assumes that arbitrary code is not executed
880 in between the multiple evaluations. In particular, we assume that a
881 non-addressable local variable will not be modified. */
883 static tree
884 builtin_save_expr (tree exp)
886 if (TREE_CODE (exp) == SSA_NAME
887 || (TREE_ADDRESSABLE (exp) == 0
888 && (TREE_CODE (exp) == PARM_DECL
889 || (VAR_P (exp) && !TREE_STATIC (exp)))))
890 return exp;
892 return save_expr (exp);
895 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
896 times to get the address of either a higher stack frame, or a return
897 address located within it (depending on FNDECL_CODE). */
899 static rtx
900 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
902 int i;
903 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
904 if (tem == NULL_RTX)
906 /* For a zero count with __builtin_return_address, we don't care what
907 frame address we return, because target-specific definitions will
908 override us. Therefore frame pointer elimination is OK, and using
909 the soft frame pointer is OK.
911 For a nonzero count, or a zero count with __builtin_frame_address,
912 we require a stable offset from the current frame pointer to the
913 previous one, so we must use the hard frame pointer, and
914 we must disable frame pointer elimination. */
915 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
916 tem = frame_pointer_rtx;
917 else
919 tem = hard_frame_pointer_rtx;
921 /* Tell reload not to eliminate the frame pointer. */
922 crtl->accesses_prior_frames = 1;
926 if (count > 0)
927 SETUP_FRAME_ADDRESSES ();
929 /* On the SPARC, the return address is not in the frame, it is in a
930 register. There is no way to access it off of the current frame
931 pointer, but it can be accessed off the previous frame pointer by
932 reading the value from the register window save area. */
933 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
934 count--;
936 /* Scan back COUNT frames to the specified frame. */
937 for (i = 0; i < count; i++)
939 /* Assume the dynamic chain pointer is in the word that the
940 frame address points to, unless otherwise specified. */
941 tem = DYNAMIC_CHAIN_ADDRESS (tem);
942 tem = memory_address (Pmode, tem);
943 tem = gen_frame_mem (Pmode, tem);
944 tem = copy_to_reg (tem);
947 /* For __builtin_frame_address, return what we've got. But, on
948 the SPARC for example, we may have to add a bias. */
949 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
950 return FRAME_ADDR_RTX (tem);
952 /* For __builtin_return_address, get the return address from that frame. */
953 #ifdef RETURN_ADDR_RTX
954 tem = RETURN_ADDR_RTX (count, tem);
955 #else
956 tem = memory_address (Pmode,
957 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
958 tem = gen_frame_mem (Pmode, tem);
959 #endif
960 return tem;
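/* Typical source-level uses that reach this expander (illustrative):
     void *ra = __builtin_return_address (0);   return address of this frame
     void *fp = __builtin_frame_address (1);    frame pointer one level up
   A nonzero COUNT walks the dynamic chain via the loop above.  */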
963 /* Alias set used for setjmp buffer. */
964 static alias_set_type setjmp_alias_set = -1;
966 /* Construct the leading half of a __builtin_setjmp call. Control will
967 return to RECEIVER_LABEL. This is also called directly by the SJLJ
968 exception handling code. */
970 void
971 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
973 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
974 rtx stack_save;
975 rtx mem;
977 if (setjmp_alias_set == -1)
978 setjmp_alias_set = new_alias_set ();
980 buf_addr = convert_memory_address (Pmode, buf_addr);
982 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
984 /* We store the frame pointer and the address of receiver_label in
985 the buffer and use the rest of it for the stack save area, which
986 is machine-dependent. */
988 mem = gen_rtx_MEM (Pmode, buf_addr);
989 set_mem_alias_set (mem, setjmp_alias_set);
990 emit_move_insn (mem, hard_frame_pointer_rtx);
992 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
993 GET_MODE_SIZE (Pmode))),
994 set_mem_alias_set (mem, setjmp_alias_set);
996 emit_move_insn (validize_mem (mem),
997 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
999 stack_save = gen_rtx_MEM (sa_mode,
1000 plus_constant (Pmode, buf_addr,
1001 2 * GET_MODE_SIZE (Pmode)));
1002 set_mem_alias_set (stack_save, setjmp_alias_set);
1003 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1005 /* If there is further processing to do, do it. */
1006 if (targetm.have_builtin_setjmp_setup ())
1007 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1009 /* We have a nonlocal label. */
1010 cfun->has_nonlocal_label = 1;
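/* The stores above give the setjmp buffer this layout (byte offsets,
   illustrative summary):
     +0                          hard frame pointer
     +GET_MODE_SIZE (Pmode)      address of RECEIVER_LABEL
     +2*GET_MODE_SIZE (Pmode)    stack save area (SAVE_NONLOCAL)  */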
1013 /* Construct the trailing part of a __builtin_setjmp call. This is
1014 also called directly by the SJLJ exception handling code.
1015 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1017 void
1018 expand_builtin_setjmp_receiver (rtx receiver_label)
1020 rtx chain;
1022 /* Mark the FP as used when we get here, so we have to make sure it's
1023 marked as used by this function. */
1024 emit_use (hard_frame_pointer_rtx);
1026 /* Mark the static chain as clobbered here so life information
1027 doesn't get messed up for it. */
1028 chain = rtx_for_static_chain (current_function_decl, true);
1029 if (chain && REG_P (chain))
1030 emit_clobber (chain);
1032 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1034 /* If the argument pointer can be eliminated in favor of the
1035 frame pointer, we don't need to restore it. We assume here
1036 that if such an elimination is present, it can always be used.
1037 This is the case on all known machines; if we don't make this
1038 assumption, we do unnecessary saving on many machines. */
1039 size_t i;
1040 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1042 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1043 if (elim_regs[i].from == ARG_POINTER_REGNUM
1044 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1045 break;
1047 if (i == ARRAY_SIZE (elim_regs))
1049 /* Now restore our arg pointer from the address at which it
1050 was saved in our stack frame. */
1051 emit_move_insn (crtl->args.internal_arg_pointer,
1052 copy_to_reg (get_arg_pointer_save_area ()));
1056 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1057 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1058 else if (targetm.have_nonlocal_goto_receiver ())
1059 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1060 else
1061 { /* Nothing */ }
1063 /* We must not allow the code we just generated to be reordered by
1064 scheduling. Specifically, the update of the frame pointer must
1065 happen immediately, not later. */
1066 emit_insn (gen_blockage ());
1069 /* __builtin_longjmp is passed a pointer to an array of five words (not
1070 all will be used on all machines). It operates similarly to the C
1071 library function of the same name, but is more efficient. Much of
1072 the code below is copied from the handling of non-local gotos. */
1074 static void
1075 expand_builtin_longjmp (rtx buf_addr, rtx value)
1077 rtx fp, lab, stack;
1078 rtx_insn *insn, *last;
1079 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1081 /* DRAP is needed for stack realignment if longjmp is expanded to the
1082 current function. */
1083 if (SUPPORTS_STACK_ALIGNMENT)
1084 crtl->need_drap = true;
1086 if (setjmp_alias_set == -1)
1087 setjmp_alias_set = new_alias_set ();
1089 buf_addr = convert_memory_address (Pmode, buf_addr);
1091 buf_addr = force_reg (Pmode, buf_addr);
1093 /* We require the user to pass a second argument of 1, because
1094 that is what builtin_setjmp will return. */
1095 gcc_assert (value == const1_rtx);
1097 last = get_last_insn ();
1098 if (targetm.have_builtin_longjmp ())
1099 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1100 else
1102 fp = gen_rtx_MEM (Pmode, buf_addr);
1103 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1104 GET_MODE_SIZE (Pmode)));
1106 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1107 2 * GET_MODE_SIZE (Pmode)));
1108 set_mem_alias_set (fp, setjmp_alias_set);
1109 set_mem_alias_set (lab, setjmp_alias_set);
1110 set_mem_alias_set (stack, setjmp_alias_set);
1112 /* Pick up FP, label, and SP from the block and jump. This code is
1113 from expand_goto in stmt.c; see there for detailed comments. */
1114 if (targetm.have_nonlocal_goto ())
1115 /* We have to pass a value to the nonlocal_goto pattern that will
1116 get copied into the static_chain pointer, but it does not matter
1117 what that value is, because builtin_setjmp does not use it. */
1118 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1119 else
1121 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1122 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1124 lab = copy_to_reg (lab);
1126 /* Restore the frame pointer and stack pointer. We must use a
1127 temporary since the setjmp buffer may be a local. */
1128 fp = copy_to_reg (fp);
1129 emit_stack_restore (SAVE_NONLOCAL, stack);
1131 /* Ensure the frame pointer move is not optimized. */
1132 emit_insn (gen_blockage ());
1133 emit_clobber (hard_frame_pointer_rtx);
1134 emit_clobber (frame_pointer_rtx);
1135 emit_move_insn (hard_frame_pointer_rtx, fp);
1137 emit_use (hard_frame_pointer_rtx);
1138 emit_use (stack_pointer_rtx);
1139 emit_indirect_jump (lab);
1143 /* Search backwards and mark the jump insn as a non-local goto.
1144 Note that this precludes the use of __builtin_longjmp to a
1145 __builtin_setjmp target in the same function. However, we've
1146 already cautioned the user that these functions are for
1147 internal exception handling use only. */
1148 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1150 gcc_assert (insn != last);
1152 if (JUMP_P (insn))
1154 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1155 break;
1157 else if (CALL_P (insn))
1158 break;
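/* Illustrative use (the only accepted form; the expander asserts that
   VALUE is the constant 1, since that is what __builtin_setjmp returns
   on the longjmp path):
     __builtin_longjmp (buf, 1);  */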
1162 static inline bool
1163 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1165 return (iter->i < iter->n);
1168 /* This function validates the types of a function call argument list
1169 against a specified list of tree_codes. If the last specifier is a 0,
1170 that represents an ellipsis, otherwise the last specifier must be a
1171 VOID_TYPE. */
1173 static bool
1174 validate_arglist (const_tree callexpr, ...)
1176 enum tree_code code;
1177 bool res = false;
1178 va_list ap;
1179 const_call_expr_arg_iterator iter;
1180 const_tree arg;
1182 va_start (ap, callexpr);
1183 init_const_call_expr_arg_iterator (callexpr, &iter);
1185 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1186 tree fn = CALL_EXPR_FN (callexpr);
1187 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1189 for (unsigned argno = 1; ; ++argno)
1191 code = (enum tree_code) va_arg (ap, int);
1193 switch (code)
1195 case 0:
1196 /* This signifies an ellipsis; any further arguments are all ok. */
1197 res = true;
1198 goto end;
1199 case VOID_TYPE:
1200 /* This signifies an endlink; if no arguments remain, return
1201 true, otherwise return false. */
1202 res = !more_const_call_expr_args_p (&iter);
1203 goto end;
1204 case POINTER_TYPE:
1205 /* The actual argument must be nonnull when either the whole
1206 called function has been declared nonnull, or when the formal
1207 argument corresponding to the actual argument has been. */
1208 if (argmap
1209 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1211 arg = next_const_call_expr_arg (&iter);
1212 if (!validate_arg (arg, code) || integer_zerop (arg))
1213 goto end;
1214 break;
1216 /* FALLTHRU */
1217 default:
1218 /* If no parameters remain or the parameter's code does not
1219 match the specified code, return false. Otherwise continue
1220 checking any remaining arguments. */
1221 arg = next_const_call_expr_arg (&iter);
1222 if (!validate_arg (arg, code))
1223 goto end;
1224 break;
1228 /* We need gotos here since we can only have one VA_CLOSE in a
1229 function. */
1230 end: ;
1231 va_end (ap);
1233 BITMAP_FREE (argmap);
1235 return res;
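/* Example use (taken from expand_builtin_nonlocal_goto below):
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   checks for exactly two pointer arguments; a trailing 0 instead of
   VOID_TYPE would have allowed further arguments of any type.  */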
1238 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1239 and the address of the save area. */
1241 static rtx
1242 expand_builtin_nonlocal_goto (tree exp)
1244 tree t_label, t_save_area;
1245 rtx r_label, r_save_area, r_fp, r_sp;
1246 rtx_insn *insn;
1248 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1249 return NULL_RTX;
1251 t_label = CALL_EXPR_ARG (exp, 0);
1252 t_save_area = CALL_EXPR_ARG (exp, 1);
1254 r_label = expand_normal (t_label);
1255 r_label = convert_memory_address (Pmode, r_label);
1256 r_save_area = expand_normal (t_save_area);
1257 r_save_area = convert_memory_address (Pmode, r_save_area);
1258 /* Copy the address of the save location to a register just in case it was
1259 based on the frame pointer. */
1260 r_save_area = copy_to_reg (r_save_area);
1261 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1262 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1263 plus_constant (Pmode, r_save_area,
1264 GET_MODE_SIZE (Pmode)));
1266 crtl->has_nonlocal_goto = 1;
1268 /* ??? We no longer need to pass the static chain value, afaik. */
1269 if (targetm.have_nonlocal_goto ())
1270 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1271 else
1273 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1274 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1276 r_label = copy_to_reg (r_label);
1278 /* Restore the frame pointer and stack pointer. We must use a
1279 temporary since the setjmp buffer may be a local. */
1280 r_fp = copy_to_reg (r_fp);
1281 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1283 /* Ensure the frame pointer move is not optimized. */
1284 emit_insn (gen_blockage ());
1285 emit_clobber (hard_frame_pointer_rtx);
1286 emit_clobber (frame_pointer_rtx);
1287 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1289 /* USE of hard_frame_pointer_rtx added for consistency;
1290 not clear if really needed. */
1291 emit_use (hard_frame_pointer_rtx);
1292 emit_use (stack_pointer_rtx);
1294 /* If the architecture is using a GP register, we must
1295 conservatively assume that the target function makes use of it.
1296 The prologue of functions with nonlocal gotos must therefore
1297 initialize the GP register to the appropriate value, and we
1298 must then make sure that this value is live at the point
1299 of the jump. (Note that this doesn't necessarily apply
1300 to targets with a nonlocal_goto pattern; they are free
1301 to implement it in their own way. Note also that this is
1302 a no-op if the GP register is a global invariant.) */
1303 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1304 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1305 emit_use (pic_offset_table_rtx);
1307 emit_indirect_jump (r_label);
1310 /* Search backwards to the jump insn and mark it as a
1311 non-local goto. */
1312 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1314 if (JUMP_P (insn))
1316 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1317 break;
1319 else if (CALL_P (insn))
1320 break;
1323 return const0_rtx;
1326 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1327 (not all will be used on all machines) that was passed to __builtin_setjmp.
1328 It updates the stack pointer in that block to the current value. This is
1329 also called directly by the SJLJ exception handling code. */
1331 void
1332 expand_builtin_update_setjmp_buf (rtx buf_addr)
1334 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1335 buf_addr = convert_memory_address (Pmode, buf_addr);
1336 rtx stack_save
1337 = gen_rtx_MEM (sa_mode,
1338 memory_address
1339 (sa_mode,
1340 plus_constant (Pmode, buf_addr,
1341 2 * GET_MODE_SIZE (Pmode))));
1343 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1346 /* Expand a call to __builtin_prefetch. For a target that does not support
1347 data prefetch, evaluate the memory address argument in case it has side
1348 effects. */
1350 static void
1351 expand_builtin_prefetch (tree exp)
1353 tree arg0, arg1, arg2;
1354 int nargs;
1355 rtx op0, op1, op2;
1357 if (!validate_arglist (exp, POINTER_TYPE, 0))
1358 return;
1360 arg0 = CALL_EXPR_ARG (exp, 0);
1362 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1363 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1364 locality). */
1365 nargs = call_expr_nargs (exp);
1366 if (nargs > 1)
1367 arg1 = CALL_EXPR_ARG (exp, 1);
1368 else
1369 arg1 = integer_zero_node;
1370 if (nargs > 2)
1371 arg2 = CALL_EXPR_ARG (exp, 2);
1372 else
1373 arg2 = integer_three_node;
1375 /* Argument 0 is an address. */
1376 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1378 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1379 if (TREE_CODE (arg1) != INTEGER_CST)
1381 error ("second argument to %<__builtin_prefetch%> must be a constant");
1382 arg1 = integer_zero_node;
1384 op1 = expand_normal (arg1);
1385 /* Argument 1 must be either zero or one. */
1386 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1388 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1389 " using zero");
1390 op1 = const0_rtx;
1393 /* Argument 2 (locality) must be a compile-time constant int. */
1394 if (TREE_CODE (arg2) != INTEGER_CST)
1396 error ("third argument to %<__builtin_prefetch%> must be a constant");
1397 arg2 = integer_zero_node;
1399 op2 = expand_normal (arg2);
1400 /* Argument 2 must be 0, 1, 2, or 3. */
1401 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1403 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1404 op2 = const0_rtx;
1407 if (targetm.have_prefetch ())
1409 class expand_operand ops[3];
1411 create_address_operand (&ops[0], op0);
1412 create_integer_operand (&ops[1], INTVAL (op1));
1413 create_integer_operand (&ops[2], INTVAL (op2));
1414 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1415 return;
1418 /* Don't do anything with direct references to volatile memory, but
1419 generate code to handle other side effects. */
1420 if (!MEM_P (op0) && side_effects_p (op0))
1421 emit_insn (op0);
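/* Illustrative source-level forms handled above:
     __builtin_prefetch (p);        read/write defaults to 0, locality to 3
     __builtin_prefetch (p, 1, 0);  prefetch for write, no temporal locality  */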
1424 /* Get a MEM rtx for expression EXP which is the address of an operand
1425 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1426 the maximum length of the block of memory that might be accessed or
1427 NULL if unknown. */
1429 static rtx
1430 get_memory_rtx (tree exp, tree len)
1432 tree orig_exp = exp;
1433 rtx addr, mem;
1435 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1436 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1437 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1438 exp = TREE_OPERAND (exp, 0);
1440 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1441 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1443 /* Get an expression we can use to find the attributes to assign to MEM.
1444 First remove any nops. */
1445 while (CONVERT_EXPR_P (exp)
1446 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1447 exp = TREE_OPERAND (exp, 0);
1449 /* Build a MEM_REF representing the whole accessed area as a byte blob
1450 (as builtin stringops may alias with anything). */
1451 exp = fold_build2 (MEM_REF,
1452 build_array_type (char_type_node,
1453 build_range_type (sizetype,
1454 size_one_node, len)),
1455 exp, build_int_cst (ptr_type_node, 0));
1457 /* If the MEM_REF has no acceptable address, try to get the base object
1458 from the original address we got, and build an all-aliasing
1459 unknown-sized access to that one. */
1460 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1461 set_mem_attributes (mem, exp, 0);
1462 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1463 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1464 0))))
1466 exp = build_fold_addr_expr (exp);
1467 exp = fold_build2 (MEM_REF,
1468 build_array_type (char_type_node,
1469 build_range_type (sizetype,
1470 size_zero_node,
1471 NULL)),
1472 exp, build_int_cst (ptr_type_node, 0));
1473 set_mem_attributes (mem, exp, 0);
1475 set_mem_alias_set (mem, 0);
1476 return mem;
1479 /* Built-in functions to perform an untyped call and return. */
1481 #define apply_args_mode \
1482 (this_target_builtins->x_apply_args_mode)
1483 #define apply_result_mode \
1484 (this_target_builtins->x_apply_result_mode)
1486 /* Return the size required for the block returned by __builtin_apply_args,
1487 and initialize apply_args_mode. */
1489 static int
1490 apply_args_size (void)
1492 static int size = -1;
1493 int align;
1494 unsigned int regno;
1496 /* The values computed by this function never change. */
1497 if (size < 0)
1499 /* The first value is the incoming arg-pointer. */
1500 size = GET_MODE_SIZE (Pmode);
1502 /* The second value is the structure value address unless this is
1503 passed as an "invisible" first argument. */
1504 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1505 size += GET_MODE_SIZE (Pmode);
1507 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1508 if (FUNCTION_ARG_REGNO_P (regno))
1510 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1512 gcc_assert (mode != VOIDmode);
1514 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1515 if (size % align != 0)
1516 size = CEIL (size, align) * align;
1517 size += GET_MODE_SIZE (mode);
1518 apply_args_mode[regno] = mode;
1520 else
1522 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1525 return size;
1528 /* Return the size required for the block returned by __builtin_apply,
1529 and initialize apply_result_mode. */
1531 static int
1532 apply_result_size (void)
1534 static int size = -1;
1535 int align, regno;
1537 /* The values computed by this function never change. */
1538 if (size < 0)
1540 size = 0;
1542 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1543 if (targetm.calls.function_value_regno_p (regno))
1545 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1547 gcc_assert (mode != VOIDmode);
1549 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1550 if (size % align != 0)
1551 size = CEIL (size, align) * align;
1552 size += GET_MODE_SIZE (mode);
1553 apply_result_mode[regno] = mode;
1555 else
1556 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1558 /* Allow targets that use untyped_call and untyped_return to override
1559 the size so that machine-specific information can be stored here. */
1560 #ifdef APPLY_RESULT_SIZE
1561 size = APPLY_RESULT_SIZE;
1562 #endif
1564 return size;
1567 /* Create a vector describing the result block RESULT. If SAVEP is true,
1568 the result block is used to save the values; otherwise it is used to
1569 restore the values. */
1571 static rtx
1572 result_vector (int savep, rtx result)
1574 int regno, size, align, nelts;
1575 fixed_size_mode mode;
1576 rtx reg, mem;
1577 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1579 size = nelts = 0;
1580 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1581 if ((mode = apply_result_mode[regno]) != VOIDmode)
1583 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1584 if (size % align != 0)
1585 size = CEIL (size, align) * align;
1586 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1587 mem = adjust_address (result, mode, size);
1588 savevec[nelts++] = (savep
1589 ? gen_rtx_SET (mem, reg)
1590 : gen_rtx_SET (reg, mem));
1591 size += GET_MODE_SIZE (mode);
1593 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1596 /* Save the state required to perform an untyped call with the same
1597 arguments as were passed to the current function. */
1599 static rtx
1600 expand_builtin_apply_args_1 (void)
1602 rtx registers, tem;
1603 int size, align, regno;
1604 fixed_size_mode mode;
1605 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1607 /* Create a block where the arg-pointer, structure value address,
1608 and argument registers can be saved. */
1609 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1611 /* Walk past the arg-pointer and structure value address. */
1612 size = GET_MODE_SIZE (Pmode);
1613 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1614 size += GET_MODE_SIZE (Pmode);
1616 /* Save each register used in calling a function to the block. */
1617 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1618 if ((mode = apply_args_mode[regno]) != VOIDmode)
1620 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1621 if (size % align != 0)
1622 size = CEIL (size, align) * align;
1624 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1626 emit_move_insn (adjust_address (registers, mode, size), tem);
1627 size += GET_MODE_SIZE (mode);
1630 /* Save the arg pointer to the block. */
1631 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1632 /* We need the pointer as the caller actually passed it to us, not
1633 as we might have pretended it was passed. Make sure it's a valid
1634 operand, as emit_move_insn isn't expected to handle a PLUS. */
1635 if (STACK_GROWS_DOWNWARD)
1636 tem
1637 = force_operand (plus_constant (Pmode, tem,
1638 crtl->args.pretend_args_size),
1639 NULL_RTX);
1640 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1642 size = GET_MODE_SIZE (Pmode);
1644 /* Save the structure value address unless this is passed as an
1645 "invisible" first argument. */
1646 if (struct_incoming_value)
1647 emit_move_insn (adjust_address (registers, Pmode, size),
1648 copy_to_reg (struct_incoming_value));
1650 /* Return the address of the block. */
1651 return copy_addr_to_reg (XEXP (registers, 0));
1654 /* __builtin_apply_args returns a block of memory allocated on
1655 the stack into which is stored the arg pointer, structure
1656 value address, static chain, and all the registers that might
1657 possibly be used in performing a function call. The code is
1658 moved to the start of the function so the incoming values are
1659 saved. */
1661 static rtx
1662 expand_builtin_apply_args (void)
1664 /* Don't do __builtin_apply_args more than once in a function.
1665 Save the result of the first call and reuse it. */
1666 if (apply_args_value != 0)
1667 return apply_args_value;
1669 /* When this function is called, it means that registers must be
1670 saved on entry to this function. So we migrate the
1671 call to the first insn of this function. */
1672 rtx temp;
1674 start_sequence ();
1675 temp = expand_builtin_apply_args_1 ();
1676 rtx_insn *seq = get_insns ();
1677 end_sequence ();
1679 apply_args_value = temp;
1681 /* Put the insns after the NOTE that starts the function.
1682 If this is inside a start_sequence, make the outer-level insn
1683 chain current, so the code is placed at the start of the
1684 function. If internal_arg_pointer is a non-virtual pseudo,
1685 it needs to be placed after the insn that initializes
1686 that pseudo. */
1687 push_topmost_sequence ();
1688 if (REG_P (crtl->args.internal_arg_pointer)
1689 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1690 emit_insn_before (seq, parm_birth_insn);
1691 else
1692 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1693 pop_topmost_sequence ();
1694 return temp;
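/* Illustrative usage sketch (not part of the compiler) of the three
   builtins expanded in this file, as they are meant to be combined at
   the source level to forward a call with the current function's
   arguments:

       void target_fn ();

       void forwarder ()
       {
         void *args = __builtin_apply_args ();
         void *result
           = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (result);
       }

   The 64 is a caller-supplied upper bound on the size of the argument
   block; the appropriate value is target-dependent, so this is only a
   sketch of the intended shape of such code.  */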
1698 /* Perform an untyped call and save the state required to perform an
1699 untyped return of whatever value was returned by the given function. */
1701 static rtx
1702 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1704 int size, align, regno;
1705 fixed_size_mode mode;
1706 rtx incoming_args, result, reg, dest, src;
1707 rtx_call_insn *call_insn;
1708 rtx old_stack_level = 0;
1709 rtx call_fusage = 0;
1710 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1712 arguments = convert_memory_address (Pmode, arguments);
1714 /* Create a block where the return registers can be saved. */
1715 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1717 /* Fetch the arg pointer from the ARGUMENTS block. */
1718 incoming_args = gen_reg_rtx (Pmode);
1719 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1720 if (!STACK_GROWS_DOWNWARD)
1721 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1722 incoming_args, 0, OPTAB_LIB_WIDEN);
1724 /* Push a new argument block and copy the arguments. Do not allow
1725 the (potential) memcpy call below to interfere with our stack
1726 manipulations. */
1727 do_pending_stack_adjust ();
1728 NO_DEFER_POP;
1730 /* Save the stack with nonlocal if available. */
1731 if (targetm.have_save_stack_nonlocal ())
1732 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1733 else
1734 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1736 /* Allocate a block of memory onto the stack and copy the memory
1737 arguments to the outgoing arguments address. We can pass TRUE
1738 as the 4th argument because we just saved the stack pointer
1739 and will restore it right after the call. */
1740 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1742 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1743 may have already set current_function_calls_alloca to true.
1744 current_function_calls_alloca won't be set if argsize is zero,
1745 so we have to guarantee need_drap is true here. */
1746 if (SUPPORTS_STACK_ALIGNMENT)
1747 crtl->need_drap = true;
1749 dest = virtual_outgoing_args_rtx;
1750 if (!STACK_GROWS_DOWNWARD)
1752 if (CONST_INT_P (argsize))
1753 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1754 else
1755 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1757 dest = gen_rtx_MEM (BLKmode, dest);
1758 set_mem_align (dest, PARM_BOUNDARY);
1759 src = gen_rtx_MEM (BLKmode, incoming_args);
1760 set_mem_align (src, PARM_BOUNDARY);
1761 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1763 /* Refer to the argument block. */
1764 apply_args_size ();
1765 arguments = gen_rtx_MEM (BLKmode, arguments);
1766 set_mem_align (arguments, PARM_BOUNDARY);
1768 /* Walk past the arg-pointer and structure value address. */
1769 size = GET_MODE_SIZE (Pmode);
1770 if (struct_value)
1771 size += GET_MODE_SIZE (Pmode);
1773 /* Restore each of the registers previously saved. Make USE insns
1774 for each of these registers for use in making the call. */
1775 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1776 if ((mode = apply_args_mode[regno]) != VOIDmode)
1778 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1779 if (size % align != 0)
1780 size = CEIL (size, align) * align;
1781 reg = gen_rtx_REG (mode, regno);
1782 emit_move_insn (reg, adjust_address (arguments, mode, size));
1783 use_reg (&call_fusage, reg);
1784 size += GET_MODE_SIZE (mode);
1787 /* Restore the structure value address unless this is passed as an
1788 "invisible" first argument. */
1789 size = GET_MODE_SIZE (Pmode);
1790 if (struct_value)
1792 rtx value = gen_reg_rtx (Pmode);
1793 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1794 emit_move_insn (struct_value, value);
1795 if (REG_P (struct_value))
1796 use_reg (&call_fusage, struct_value);
1799 /* All arguments and registers used for the call are set up by now! */
1800 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1802 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no action is needed,
1803 and we don't want to load it into a register as an optimization,
1804 because prepare_call_address already did it if it should be done. */
1805 if (GET_CODE (function) != SYMBOL_REF)
1806 function = memory_address (FUNCTION_MODE, function);
1808 /* Generate the actual call instruction and save the return value. */
1809 if (targetm.have_untyped_call ())
1811 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1812 emit_call_insn (targetm.gen_untyped_call (mem, result,
1813 result_vector (1, result)));
1815 else if (targetm.have_call_value ())
1817 rtx valreg = 0;
1819 /* Locate the unique return register. It is not possible to
1820 express a call that sets more than one return register using
1821 call_value; use untyped_call for that. In fact, untyped_call
1822 only needs to save the return registers in the given block. */
1823 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1824 if ((mode = apply_result_mode[regno]) != VOIDmode)
1826 gcc_assert (!valreg); /* have_untyped_call required. */
1828 valreg = gen_rtx_REG (mode, regno);
1831 emit_insn (targetm.gen_call_value (valreg,
1832 gen_rtx_MEM (FUNCTION_MODE, function),
1833 const0_rtx, NULL_RTX, const0_rtx));
1835 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1837 else
1838 gcc_unreachable ();
1840 /* Find the CALL insn we just emitted, and attach the register usage
1841 information. */
1842 call_insn = last_call_insn ();
1843 add_function_usage_to (call_insn, call_fusage);
1845 /* Restore the stack. */
1846 if (targetm.have_save_stack_nonlocal ())
1847 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1848 else
1849 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1850 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1852 OK_DEFER_POP;
1854 /* Return the address of the result block. */
1855 result = copy_addr_to_reg (XEXP (result, 0));
1856 return convert_memory_address (ptr_mode, result);
1859 /* Perform an untyped return. */
1861 static void
1862 expand_builtin_return (rtx result)
1864 int size, align, regno;
1865 fixed_size_mode mode;
1866 rtx reg;
1867 rtx_insn *call_fusage = 0;
1869 result = convert_memory_address (Pmode, result);
1871 apply_result_size ();
1872 result = gen_rtx_MEM (BLKmode, result);
1874 if (targetm.have_untyped_return ())
1876 rtx vector = result_vector (0, result);
1877 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1878 emit_barrier ();
1879 return;
1882 /* Restore the return value and note that each value is used. */
1883 size = 0;
1884 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1885 if ((mode = apply_result_mode[regno]) != VOIDmode)
1887 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1888 if (size % align != 0)
1889 size = CEIL (size, align) * align;
1890 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1891 emit_move_insn (reg, adjust_address (result, mode, size));
1893 push_to_sequence (call_fusage);
1894 emit_use (reg);
1895 call_fusage = get_insns ();
1896 end_sequence ();
1897 size += GET_MODE_SIZE (mode);
1900 /* Put the USE insns before the return. */
1901 emit_insn (call_fusage);
1903 /* Return whatever values were restored by jumping directly to the end
1904 of the function. */
1905 expand_naked_return ();
1908 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1910 static enum type_class
1911 type_to_class (tree type)
1913 switch (TREE_CODE (type))
1915 case VOID_TYPE: return void_type_class;
1916 case INTEGER_TYPE: return integer_type_class;
1917 case ENUMERAL_TYPE: return enumeral_type_class;
1918 case BOOLEAN_TYPE: return boolean_type_class;
1919 case POINTER_TYPE: return pointer_type_class;
1920 case REFERENCE_TYPE: return reference_type_class;
1921 case OFFSET_TYPE: return offset_type_class;
1922 case REAL_TYPE: return real_type_class;
1923 case COMPLEX_TYPE: return complex_type_class;
1924 case FUNCTION_TYPE: return function_type_class;
1925 case METHOD_TYPE: return method_type_class;
1926 case RECORD_TYPE: return record_type_class;
1927 case UNION_TYPE:
1928 case QUAL_UNION_TYPE: return union_type_class;
1929 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1930 ? string_type_class : array_type_class);
1931 case LANG_TYPE: return lang_type_class;
1932 default: return no_type_class;
1936 /* Expand a call EXP to __builtin_classify_type. */
1938 static rtx
1939 expand_builtin_classify_type (tree exp)
1941 if (call_expr_nargs (exp))
1942 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1943 return GEN_INT (no_type_class);
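/* A usage sketch: in C, __builtin_classify_type (1.0) evaluates to
   real_type_class, and __builtin_classify_type ("") to
   pointer_type_class, since a function-call argument undergoes the
   usual array-to-pointer decay.  */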
1946 /* This helper macro, meant to be used in mathfn_built_in below, determines
1947 which among a set of builtin math functions is appropriate for a given type
1948 mode. The `F' (float) and `L' (long double) are automatically generated
1949 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1950 types, there are additional types that are considered with 'F32', 'F64',
1951 'F128', etc. suffixes. */
1952 #define CASE_MATHFN(MATHFN) \
1953 CASE_CFN_##MATHFN: \
1954 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1955 fcodel = BUILT_IN_##MATHFN##L ; break;
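/* For example, CASE_MATHFN (SQRT) expands (via CASE_CFN_SQRT from
   case-cfn-macros.h) to roughly:

       case CFN_BUILT_IN_SQRT:
       case CFN_SQRT:
         fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
         fcodel = BUILT_IN_SQRTL; break;  */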
1956 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1957 types. */
1958 #define CASE_MATHFN_FLOATN(MATHFN) \
1959 CASE_CFN_##MATHFN: \
1960 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1961 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1962 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1963 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1964 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1965 break;
1966 /* Similar to above, but appends _R after any F/L suffix. */
1967 #define CASE_MATHFN_REENT(MATHFN) \
1968 case CFN_BUILT_IN_##MATHFN##_R: \
1969 case CFN_BUILT_IN_##MATHFN##F_R: \
1970 case CFN_BUILT_IN_##MATHFN##L_R: \
1971 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1972 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1974 /* Return a function equivalent to FN but operating on floating-point
1975 values of type TYPE, or END_BUILTINS if no such function exists.
1976 This is purely an operation on function codes; it does not guarantee
1977 that the target actually has an implementation of the function. */
1979 static built_in_function
1980 mathfn_built_in_2 (tree type, combined_fn fn)
1982 tree mtype;
1983 built_in_function fcode, fcodef, fcodel;
1984 built_in_function fcodef16 = END_BUILTINS;
1985 built_in_function fcodef32 = END_BUILTINS;
1986 built_in_function fcodef64 = END_BUILTINS;
1987 built_in_function fcodef128 = END_BUILTINS;
1988 built_in_function fcodef32x = END_BUILTINS;
1989 built_in_function fcodef64x = END_BUILTINS;
1990 built_in_function fcodef128x = END_BUILTINS;
1992 switch (fn)
1994 CASE_MATHFN (ACOS)
1995 CASE_MATHFN (ACOSH)
1996 CASE_MATHFN (ASIN)
1997 CASE_MATHFN (ASINH)
1998 CASE_MATHFN (ATAN)
1999 CASE_MATHFN (ATAN2)
2000 CASE_MATHFN (ATANH)
2001 CASE_MATHFN (CBRT)
2002 CASE_MATHFN_FLOATN (CEIL)
2003 CASE_MATHFN (CEXPI)
2004 CASE_MATHFN_FLOATN (COPYSIGN)
2005 CASE_MATHFN (COS)
2006 CASE_MATHFN (COSH)
2007 CASE_MATHFN (DREM)
2008 CASE_MATHFN (ERF)
2009 CASE_MATHFN (ERFC)
2010 CASE_MATHFN (EXP)
2011 CASE_MATHFN (EXP10)
2012 CASE_MATHFN (EXP2)
2013 CASE_MATHFN (EXPM1)
2014 CASE_MATHFN (FABS)
2015 CASE_MATHFN (FDIM)
2016 CASE_MATHFN_FLOATN (FLOOR)
2017 CASE_MATHFN_FLOATN (FMA)
2018 CASE_MATHFN_FLOATN (FMAX)
2019 CASE_MATHFN_FLOATN (FMIN)
2020 CASE_MATHFN (FMOD)
2021 CASE_MATHFN (FREXP)
2022 CASE_MATHFN (GAMMA)
2023 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2024 CASE_MATHFN (HUGE_VAL)
2025 CASE_MATHFN (HYPOT)
2026 CASE_MATHFN (ILOGB)
2027 CASE_MATHFN (ICEIL)
2028 CASE_MATHFN (IFLOOR)
2029 CASE_MATHFN (INF)
2030 CASE_MATHFN (IRINT)
2031 CASE_MATHFN (IROUND)
2032 CASE_MATHFN (ISINF)
2033 CASE_MATHFN (J0)
2034 CASE_MATHFN (J1)
2035 CASE_MATHFN (JN)
2036 CASE_MATHFN (LCEIL)
2037 CASE_MATHFN (LDEXP)
2038 CASE_MATHFN (LFLOOR)
2039 CASE_MATHFN (LGAMMA)
2040 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2041 CASE_MATHFN (LLCEIL)
2042 CASE_MATHFN (LLFLOOR)
2043 CASE_MATHFN (LLRINT)
2044 CASE_MATHFN (LLROUND)
2045 CASE_MATHFN (LOG)
2046 CASE_MATHFN (LOG10)
2047 CASE_MATHFN (LOG1P)
2048 CASE_MATHFN (LOG2)
2049 CASE_MATHFN (LOGB)
2050 CASE_MATHFN (LRINT)
2051 CASE_MATHFN (LROUND)
2052 CASE_MATHFN (MODF)
2053 CASE_MATHFN (NAN)
2054 CASE_MATHFN (NANS)
2055 CASE_MATHFN_FLOATN (NEARBYINT)
2056 CASE_MATHFN (NEXTAFTER)
2057 CASE_MATHFN (NEXTTOWARD)
2058 CASE_MATHFN (POW)
2059 CASE_MATHFN (POWI)
2060 CASE_MATHFN (POW10)
2061 CASE_MATHFN (REMAINDER)
2062 CASE_MATHFN (REMQUO)
2063 CASE_MATHFN_FLOATN (RINT)
2064 CASE_MATHFN_FLOATN (ROUND)
2065 CASE_MATHFN_FLOATN (ROUNDEVEN)
2066 CASE_MATHFN (SCALB)
2067 CASE_MATHFN (SCALBLN)
2068 CASE_MATHFN (SCALBN)
2069 CASE_MATHFN (SIGNBIT)
2070 CASE_MATHFN (SIGNIFICAND)
2071 CASE_MATHFN (SIN)
2072 CASE_MATHFN (SINCOS)
2073 CASE_MATHFN (SINH)
2074 CASE_MATHFN_FLOATN (SQRT)
2075 CASE_MATHFN (TAN)
2076 CASE_MATHFN (TANH)
2077 CASE_MATHFN (TGAMMA)
2078 CASE_MATHFN_FLOATN (TRUNC)
2079 CASE_MATHFN (Y0)
2080 CASE_MATHFN (Y1)
2081 CASE_MATHFN (YN)
2083 default:
2084 return END_BUILTINS;
2087 mtype = TYPE_MAIN_VARIANT (type);
2088 if (mtype == double_type_node)
2089 return fcode;
2090 else if (mtype == float_type_node)
2091 return fcodef;
2092 else if (mtype == long_double_type_node)
2093 return fcodel;
2094 else if (mtype == float16_type_node)
2095 return fcodef16;
2096 else if (mtype == float32_type_node)
2097 return fcodef32;
2098 else if (mtype == float64_type_node)
2099 return fcodef64;
2100 else if (mtype == float128_type_node)
2101 return fcodef128;
2102 else if (mtype == float32x_type_node)
2103 return fcodef32x;
2104 else if (mtype == float64x_type_node)
2105 return fcodef64x;
2106 else if (mtype == float128x_type_node)
2107 return fcodef128x;
2108 else
2109 return END_BUILTINS;
2112 /* Return the math function equivalent to FN but operating directly on TYPE,
2113 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2114 otherwise use the explicit declaration. If we can't do the conversion,
2115 return null. */
2117 static tree
2118 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2120 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2121 if (fcode2 == END_BUILTINS)
2122 return NULL_TREE;
2124 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2125 return NULL_TREE;
2127 return builtin_decl_explicit (fcode2);
2130 /* Like mathfn_built_in_1, but always use the implicit array. */
2132 tree
2133 mathfn_built_in (tree type, combined_fn fn)
2135 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2138 /* Like mathfn_built_in_1, but take a built_in_function and
2139 always use the implicit array. */
2141 tree
2142 mathfn_built_in (tree type, enum built_in_function fn)
2144 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
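/* A usage sketch: mathfn_built_in (float_type_node, BUILT_IN_SIN)
   yields the decl for BUILT_IN_SINF (i.e. sinf), or NULL_TREE when no
   implicit declaration of it is available on the target.  */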
2147 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2148 return its code, otherwise return IFN_LAST. Note that this function
2149 only tests whether the function is defined in internals.def, not whether
2150 it is actually available on the target. */
2152 internal_fn
2153 associated_internal_fn (tree fndecl)
2155 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2156 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2157 switch (DECL_FUNCTION_CODE (fndecl))
2159 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2162 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2163 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2164 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2165 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2166 #include "internal-fn.def"
2168 CASE_FLT_FN (BUILT_IN_POW10):
2169 return IFN_EXP10;
2171 CASE_FLT_FN (BUILT_IN_DREM):
2172 return IFN_REMAINDER;
2174 CASE_FLT_FN (BUILT_IN_SCALBN):
2175 CASE_FLT_FN (BUILT_IN_SCALBLN):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2177 return IFN_LDEXP;
2178 return IFN_LAST;
2180 default:
2181 return IFN_LAST;
2185 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2186 on the current target by a call to an internal function, return the
2187 code of that internal function, otherwise return IFN_LAST. The caller
2188 is responsible for ensuring that any side-effects of the built-in
2189 call are dealt with correctly. E.g. if CALL sets errno, the caller
2190 must decide that the errno result isn't needed or make it available
2191 in some other way. */
2193 internal_fn
2194 replacement_internal_fn (gcall *call)
2196 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2198 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2199 if (ifn != IFN_LAST)
2201 tree_pair types = direct_internal_fn_types (ifn, call);
2202 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2203 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2204 return ifn;
2207 return IFN_LAST;
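/* For example, a GIMPLE call to __builtin_sqrt may be replaced by
   IFN_SQRT when the target implements the corresponding optab for the
   selected optimization type -- provided the caller has first
   established that sqrt's errno side-effect is not needed, e.g. under
   -fno-math-errno or for a provably non-negative argument.  */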
2210 /* Expand a call to the builtin ternary math functions (fma).
2211 Return NULL_RTX if a normal call should be emitted rather than expanding the
2212 function in-line. EXP is the expression that is a call to the builtin
2213 function; if convenient, the result should be placed in TARGET.
2214 SUBTARGET may be used as the target for computing one of EXP's
2215 operands. */
2217 static rtx
2218 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2220 optab builtin_optab;
2221 rtx op0, op1, op2, result;
2222 rtx_insn *insns;
2223 tree fndecl = get_callee_fndecl (exp);
2224 tree arg0, arg1, arg2;
2225 machine_mode mode;
2227 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2228 return NULL_RTX;
2230 arg0 = CALL_EXPR_ARG (exp, 0);
2231 arg1 = CALL_EXPR_ARG (exp, 1);
2232 arg2 = CALL_EXPR_ARG (exp, 2);
2234 switch (DECL_FUNCTION_CODE (fndecl))
2236 CASE_FLT_FN (BUILT_IN_FMA):
2237 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2238 builtin_optab = fma_optab; break;
2239 default:
2240 gcc_unreachable ();
2243 /* Make a suitable register to place result in. */
2244 mode = TYPE_MODE (TREE_TYPE (exp));
2246 /* Before working hard, check whether the instruction is available. */
2247 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2248 return NULL_RTX;
2250 result = gen_reg_rtx (mode);
2252 /* Always stabilize the argument list. */
2253 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2254 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2255 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2257 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2258 op1 = expand_normal (arg1);
2259 op2 = expand_normal (arg2);
2261 start_sequence ();
2263 /* Compute into RESULT.
2264 Set RESULT to wherever the result comes back. */
2265 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2266 result, 0);
2268 /* If we were unable to expand via the builtin, stop the sequence
2269 (without outputting the insns) and call the library function
2270 with the stabilized argument list. */
2271 if (result == 0)
2273 end_sequence ();
2274 return expand_call (exp, target, target == const0_rtx);
2277 /* Output the entire sequence. */
2278 insns = get_insns ();
2279 end_sequence ();
2280 emit_insn (insns);
2282 return result;
2285 /* Expand a call to the builtin sin and cos math functions.
2286 Return NULL_RTX if a normal call should be emitted rather than expanding the
2287 function in-line. EXP is the expression that is a call to the builtin
2288 function; if convenient, the result should be placed in TARGET.
2289 SUBTARGET may be used as the target for computing one of EXP's
2290 operands. */
2292 static rtx
2293 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2295 optab builtin_optab;
2296 rtx op0;
2297 rtx_insn *insns;
2298 tree fndecl = get_callee_fndecl (exp);
2299 machine_mode mode;
2300 tree arg;
2302 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2303 return NULL_RTX;
2305 arg = CALL_EXPR_ARG (exp, 0);
2307 switch (DECL_FUNCTION_CODE (fndecl))
2309 CASE_FLT_FN (BUILT_IN_SIN):
2310 CASE_FLT_FN (BUILT_IN_COS):
2311 builtin_optab = sincos_optab; break;
2312 default:
2313 gcc_unreachable ();
2316 /* Make a suitable register to place result in. */
2317 mode = TYPE_MODE (TREE_TYPE (exp));
2319 /* Check if the sincos insn is available; otherwise fall back
2320 to sin or cos insn. */
2321 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2322 switch (DECL_FUNCTION_CODE (fndecl))
2324 CASE_FLT_FN (BUILT_IN_SIN):
2325 builtin_optab = sin_optab; break;
2326 CASE_FLT_FN (BUILT_IN_COS):
2327 builtin_optab = cos_optab; break;
2328 default:
2329 gcc_unreachable ();
2332 /* Before working hard, check whether the instruction is available. */
2333 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2335 rtx result = gen_reg_rtx (mode);
2337 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2338 need to expand the argument again. This way, we will not perform
2339 side-effects more than once. */
2340 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2342 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2344 start_sequence ();
2346 /* Compute into RESULT.
2347 Set RESULT to wherever the result comes back. */
2348 if (builtin_optab == sincos_optab)
2350 int ok;
2352 switch (DECL_FUNCTION_CODE (fndecl))
2354 CASE_FLT_FN (BUILT_IN_SIN):
2355 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2356 break;
2357 CASE_FLT_FN (BUILT_IN_COS):
2358 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2359 break;
2360 default:
2361 gcc_unreachable ();
2363 gcc_assert (ok);
2365 else
2366 result = expand_unop (mode, builtin_optab, op0, result, 0);
2368 if (result != 0)
2370 /* Output the entire sequence. */
2371 insns = get_insns ();
2372 end_sequence ();
2373 emit_insn (insns);
2374 return result;
2377 /* If we were unable to expand via the builtin, stop the sequence
2378 (without outputting the insns) and call the library function
2379 with the stabilized argument list. */
2380 end_sequence ();
2383 return expand_call (exp, target, target == const0_rtx);
2386 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2387 return an RTL instruction code that implements the functionality.
2388 If that isn't possible or available return CODE_FOR_nothing. */
2390 static enum insn_code
2391 interclass_mathfn_icode (tree arg, tree fndecl)
2393 bool errno_set = false;
2394 optab builtin_optab = unknown_optab;
2395 machine_mode mode;
2397 switch (DECL_FUNCTION_CODE (fndecl))
2399 CASE_FLT_FN (BUILT_IN_ILOGB):
2400 errno_set = true; builtin_optab = ilogb_optab; break;
2401 CASE_FLT_FN (BUILT_IN_ISINF):
2402 builtin_optab = isinf_optab; break;
2403 case BUILT_IN_ISNORMAL:
2404 case BUILT_IN_ISFINITE:
2405 CASE_FLT_FN (BUILT_IN_FINITE):
2406 case BUILT_IN_FINITED32:
2407 case BUILT_IN_FINITED64:
2408 case BUILT_IN_FINITED128:
2409 case BUILT_IN_ISINFD32:
2410 case BUILT_IN_ISINFD64:
2411 case BUILT_IN_ISINFD128:
2412 /* These builtins have no optabs (yet). */
2413 break;
2414 default:
2415 gcc_unreachable ();
2418 /* There's no easy way to detect the case we need to set EDOM. */
2419 if (flag_errno_math && errno_set)
2420 return CODE_FOR_nothing;
2422 /* Optab mode depends on the mode of the input argument. */
2423 mode = TYPE_MODE (TREE_TYPE (arg));
2425 if (builtin_optab)
2426 return optab_handler (builtin_optab, mode);
2427 return CODE_FOR_nothing;
2430 /* Expand a call to one of the builtin math functions that operate on
2431 a floating-point argument and output an integer result (ilogb, isinf,
2432 isnan, etc).
2433 Return 0 if a normal call should be emitted rather than expanding the
2434 function in-line. EXP is the expression that is a call to the builtin
2435 function; if convenient, the result should be placed in TARGET. */
2437 static rtx
2438 expand_builtin_interclass_mathfn (tree exp, rtx target)
2440 enum insn_code icode = CODE_FOR_nothing;
2441 rtx op0;
2442 tree fndecl = get_callee_fndecl (exp);
2443 machine_mode mode;
2444 tree arg;
2446 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2447 return NULL_RTX;
2449 arg = CALL_EXPR_ARG (exp, 0);
2450 icode = interclass_mathfn_icode (arg, fndecl);
2451 mode = TYPE_MODE (TREE_TYPE (arg));
2453 if (icode != CODE_FOR_nothing)
2455 class expand_operand ops[1];
2456 rtx_insn *last = get_last_insn ();
2457 tree orig_arg = arg;
2459 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2460 need to expand the argument again. This way, we will not perform
2461 side-effects more than once. */
2462 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2464 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466 if (mode != GET_MODE (op0))
2467 op0 = convert_to_mode (mode, op0, 0);
2469 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2470 if (maybe_legitimize_operands (icode, 0, 1, ops)
2471 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2472 return ops[0].value;
2474 delete_insns_since (last);
2475 CALL_EXPR_ARG (exp, 0) = orig_arg;
2478 return NULL_RTX;
2481 /* Expand a call to the builtin sincos math function.
2482 Return NULL_RTX if a normal call should be emitted rather than expanding the
2483 function in-line. EXP is the expression that is a call to the builtin
2484 function. */
2486 static rtx
2487 expand_builtin_sincos (tree exp)
2489 rtx op0, op1, op2, target1, target2;
2490 machine_mode mode;
2491 tree arg, sinp, cosp;
2492 int result;
2493 location_t loc = EXPR_LOCATION (exp);
2494 tree alias_type, alias_off;
2496 if (!validate_arglist (exp, REAL_TYPE,
2497 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2498 return NULL_RTX;
2500 arg = CALL_EXPR_ARG (exp, 0);
2501 sinp = CALL_EXPR_ARG (exp, 1);
2502 cosp = CALL_EXPR_ARG (exp, 2);
2504 /* Make a suitable register to place result in. */
2505 mode = TYPE_MODE (TREE_TYPE (arg));
2507 /* Check if sincos insn is available, otherwise emit the call. */
2508 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2509 return NULL_RTX;
2511 target1 = gen_reg_rtx (mode);
2512 target2 = gen_reg_rtx (mode);
2514 op0 = expand_normal (arg);
2515 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2516 alias_off = build_int_cst (alias_type, 0);
2517 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2518 sinp, alias_off));
2519 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2520 cosp, alias_off));
2522 /* Compute into target1 and target2.
2523 Set TARGET to wherever the result comes back. */
2524 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2525 gcc_assert (result);
2527 /* Move target1 and target2 to the memory locations indicated
2528 by op1 and op2. */
2529 emit_move_insn (op1, target1);
2530 emit_move_insn (op2, target2);
2532 return const0_rtx;
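/* E.g. a call sincos (x, &s, &c) on a target implementing the sincos
   optab computes both results with a single insn and then stores them
   through the two pointers; when the optab is missing, the NULL_RTX
   above makes the caller emit an ordinary library call instead.  */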
2535 /* Expand a call to the internal cexpi builtin to the sincos math function.
2536 EXP is the expression that is a call to the builtin function; if convenient,
2537 the result should be placed in TARGET. */
2539 static rtx
2540 expand_builtin_cexpi (tree exp, rtx target)
2542 tree fndecl = get_callee_fndecl (exp);
2543 tree arg, type;
2544 machine_mode mode;
2545 rtx op0, op1, op2;
2546 location_t loc = EXPR_LOCATION (exp);
2548 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2549 return NULL_RTX;
2551 arg = CALL_EXPR_ARG (exp, 0);
2552 type = TREE_TYPE (arg);
2553 mode = TYPE_MODE (TREE_TYPE (arg));
2555 /* Try expanding via a sincos optab, fall back to emitting a libcall
2556 to sincos or cexp. We are sure one of them is available because
2557 cexpi is only generated when sincos or cexp is available. */
2558 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2560 op1 = gen_reg_rtx (mode);
2561 op2 = gen_reg_rtx (mode);
2563 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2565 /* Compute into op1 and op2. */
2566 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2568 else if (targetm.libc_has_function (function_sincos))
2570 tree call, fn = NULL_TREE;
2571 tree top1, top2;
2572 rtx op1a, op2a;
2574 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2575 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2576 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2577 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2578 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2579 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2580 else
2581 gcc_unreachable ();
2583 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2584 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op1a = copy_addr_to_reg (XEXP (op1, 0));
2586 op2a = copy_addr_to_reg (XEXP (op2, 0));
2587 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2588 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2590 /* Make sure not to fold the sincos call again. */
2591 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2592 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2593 call, 3, arg, top1, top2));
2595 else
2597 tree call, fn = NULL_TREE, narg;
2598 tree ctype = build_complex_type (type);
2600 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2601 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2602 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2603 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2604 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2605 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2606 else
2607 gcc_unreachable ();
2609 /* If we don't have a decl for cexp create one. This is the
2610 friendliest fallback if the user calls __builtin_cexpi
2611 on a target without full C99 function support. */
2612 if (fn == NULL_TREE)
2614 tree fntype;
2615 const char *name = NULL;
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 name = "cexpf";
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 name = "cexp";
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 name = "cexpl";
2624 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2625 fn = build_fn_decl (name, fntype);
2628 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2629 build_real (type, dconst0), arg);
2631 /* Make sure not to fold the cexp call again. */
2632 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2633 return expand_expr (build_call_nary (ctype, call, 1, narg),
2634 target, VOIDmode, EXPAND_NORMAL);
2637 /* Now build the proper return type. */
2638 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2639 make_tree (TREE_TYPE (arg), op2),
2640 make_tree (TREE_TYPE (arg), op1)),
2641 target, VOIDmode, EXPAND_NORMAL);
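/* Mathematically cexpi (x) == cos (x) + i*sin (x), which is why a
   sincos insn or libcall suffices, and why the final fallback above
   computes it as cexp (0.0 + x*i) instead.  */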
2644 /* Conveniently construct a function call expression. FNDECL names the
2645 function to be called, N is the number of arguments, and the "..."
2646 parameters are the argument expressions. Unlike build_call_expr
2647 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2649 static tree
2650 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2652 va_list ap;
2653 tree fntype = TREE_TYPE (fndecl);
2654 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2656 va_start (ap, n);
2657 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2658 va_end (ap);
2659 SET_EXPR_LOCATION (fn, loc);
2660 return fn;
2663 /* Expand a call to one of the builtin rounding functions gcc defines
2664 as an extension (lfloor and lceil). As these are gcc extensions we
2665 do not need to worry about setting errno to EDOM.
2666 If expanding via optab fails, lower expression to (int)(floor(x)).
2667 EXP is the expression that is a call to the builtin function;
2668 if convenient, the result should be placed in TARGET. */
2670 static rtx
2671 expand_builtin_int_roundingfn (tree exp, rtx target)
2673 convert_optab builtin_optab;
2674 rtx op0, tmp;
2675 rtx_insn *insns;
2676 tree fndecl = get_callee_fndecl (exp);
2677 enum built_in_function fallback_fn;
2678 tree fallback_fndecl;
2679 machine_mode mode;
2680 tree arg;
2682 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2683 return NULL_RTX;
2685 arg = CALL_EXPR_ARG (exp, 0);
2687 switch (DECL_FUNCTION_CODE (fndecl))
2689 CASE_FLT_FN (BUILT_IN_ICEIL):
2690 CASE_FLT_FN (BUILT_IN_LCEIL):
2691 CASE_FLT_FN (BUILT_IN_LLCEIL):
2692 builtin_optab = lceil_optab;
2693 fallback_fn = BUILT_IN_CEIL;
2694 break;
2696 CASE_FLT_FN (BUILT_IN_IFLOOR):
2697 CASE_FLT_FN (BUILT_IN_LFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2699 builtin_optab = lfloor_optab;
2700 fallback_fn = BUILT_IN_FLOOR;
2701 break;
2703 default:
2704 gcc_unreachable ();
2707 /* Make a suitable register to place result in. */
2708 mode = TYPE_MODE (TREE_TYPE (exp));
2710 target = gen_reg_rtx (mode);
2712 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2713 need to expand the argument again. This way, we will not perform
2714 side-effects more than once. */
2715 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2717 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2719 start_sequence ();
2721 /* Compute into TARGET. */
2722 if (expand_sfix_optab (target, op0, builtin_optab))
2724 /* Output the entire sequence. */
2725 insns = get_insns ();
2726 end_sequence ();
2727 emit_insn (insns);
2728 return target;
2731 /* If we were unable to expand via the builtin, stop the sequence
2732 (without outputting the insns). */
2733 end_sequence ();
2735 /* Fall back to floating point rounding optab. */
2736 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2738 /* For non-C99 targets we may end up without a fallback fndecl here
2739 if the user called __builtin_lfloor directly. In this case emit
2740 a call to the floor/ceil variants nevertheless. This should result
2741 in the best user experience for targets lacking full C99 support. */
2742 if (fallback_fndecl == NULL_TREE)
2744 tree fntype;
2745 const char *name = NULL;
2747 switch (DECL_FUNCTION_CODE (fndecl))
2749 case BUILT_IN_ICEIL:
2750 case BUILT_IN_LCEIL:
2751 case BUILT_IN_LLCEIL:
2752 name = "ceil";
2753 break;
2754 case BUILT_IN_ICEILF:
2755 case BUILT_IN_LCEILF:
2756 case BUILT_IN_LLCEILF:
2757 name = "ceilf";
2758 break;
2759 case BUILT_IN_ICEILL:
2760 case BUILT_IN_LCEILL:
2761 case BUILT_IN_LLCEILL:
2762 name = "ceill";
2763 break;
2764 case BUILT_IN_IFLOOR:
2765 case BUILT_IN_LFLOOR:
2766 case BUILT_IN_LLFLOOR:
2767 name = "floor";
2768 break;
2769 case BUILT_IN_IFLOORF:
2770 case BUILT_IN_LFLOORF:
2771 case BUILT_IN_LLFLOORF:
2772 name = "floorf";
2773 break;
2774 case BUILT_IN_IFLOORL:
2775 case BUILT_IN_LFLOORL:
2776 case BUILT_IN_LLFLOORL:
2777 name = "floorl";
2778 break;
2779 default:
2780 gcc_unreachable ();
2783 fntype = build_function_type_list (TREE_TYPE (arg),
2784 TREE_TYPE (arg), NULL_TREE);
2785 fallback_fndecl = build_fn_decl (name, fntype);
2788 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2790 tmp = expand_normal (exp);
2791 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2793 /* Truncate the result of floating point optab to integer
2794 via expand_fix (). */
2795 target = gen_reg_rtx (mode);
2796 expand_fix (target, tmp, 0);
2798 return target;
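/* A concrete example of the fallback path above: on a target without
   an lfloor pattern, long l = __builtin_lfloor (x); is expanded as if
   it were written (long) floor (x) -- a call to floor followed by an
   expand_fix conversion of the result.  */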
2801 /* Expand a call to one of the builtin math functions doing integer
2802 conversion (lrint).
2803 Return 0 if a normal call should be emitted rather than expanding the
2804 function in-line. EXP is the expression that is a call to the builtin
2805 function; if convenient, the result should be placed in TARGET. */
2807 static rtx
2808 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2810 convert_optab builtin_optab;
2811 rtx op0;
2812 rtx_insn *insns;
2813 tree fndecl = get_callee_fndecl (exp);
2814 tree arg;
2815 machine_mode mode;
2816 enum built_in_function fallback_fn = BUILT_IN_NONE;
2818 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2819 return NULL_RTX;
2821 arg = CALL_EXPR_ARG (exp, 0);
2823 switch (DECL_FUNCTION_CODE (fndecl))
2825 CASE_FLT_FN (BUILT_IN_IRINT):
2826 fallback_fn = BUILT_IN_LRINT;
2827 gcc_fallthrough ();
2828 CASE_FLT_FN (BUILT_IN_LRINT):
2829 CASE_FLT_FN (BUILT_IN_LLRINT):
2830 builtin_optab = lrint_optab;
2831 break;
2833 CASE_FLT_FN (BUILT_IN_IROUND):
2834 fallback_fn = BUILT_IN_LROUND;
2835 gcc_fallthrough ();
2836 CASE_FLT_FN (BUILT_IN_LROUND):
2837 CASE_FLT_FN (BUILT_IN_LLROUND):
2838 builtin_optab = lround_optab;
2839 break;
2841 default:
2842 gcc_unreachable ();
2845 /* There's no easy way to detect the case we need to set EDOM. */
2846 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2847 return NULL_RTX;
2849 /* Make a suitable register to place result in. */
2850 mode = TYPE_MODE (TREE_TYPE (exp));
2852 /* Expand inline only when errno needn't be set; there's no easy way to detect the case we need to set EDOM. */
2853 if (!flag_errno_math)
2855 rtx result = gen_reg_rtx (mode);
2857 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2858 need to expand the argument again. This way, we will not perform
2859 side-effects more than once. */
2860 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2862 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2864 start_sequence ();
2866 if (expand_sfix_optab (result, op0, builtin_optab))
2868 /* Output the entire sequence. */
2869 insns = get_insns ();
2870 end_sequence ();
2871 emit_insn (insns);
2872 return result;
2875 /* If we were unable to expand via the builtin, stop the sequence
2876 (without outputting the insns) and call the library function
2877 with the stabilized argument list. */
2878 end_sequence ();
2881 if (fallback_fn != BUILT_IN_NONE)
2883 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2884 targets, (int) round (x) should never be transformed into
2885 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2886 a call to lround in the hope that the target provides at least some
2887 C99 functions. This should result in the best user experience for
2888 targets lacking full C99 support. */
2889 tree fallback_fndecl = mathfn_built_in_1
2890 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2892 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2893 fallback_fndecl, 1, arg);
2895 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2896 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2897 return convert_to_mode (mode, target, 0);
2900 return expand_call (exp, target, target == const0_rtx);
2903 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2904 a normal call should be emitted rather than expanding the function
2905 in-line. EXP is the expression that is a call to the builtin
2906 function; if convenient, the result should be placed in TARGET. */
2908 static rtx
2909 expand_builtin_powi (tree exp, rtx target)
2911 tree arg0, arg1;
2912 rtx op0, op1;
2913 machine_mode mode;
2914 machine_mode mode2;
2916 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2917 return NULL_RTX;
2919 arg0 = CALL_EXPR_ARG (exp, 0);
2920 arg1 = CALL_EXPR_ARG (exp, 1);
2921 mode = TYPE_MODE (TREE_TYPE (exp));
2923 /* Emit a libcall to libgcc. */
2925 /* Mode of the 2nd argument must match that of an int. */
2926 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2928 if (target == NULL_RTX)
2929 target = gen_reg_rtx (mode);
2931 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2932 if (GET_MODE (op0) != mode)
2933 op0 = convert_to_mode (mode, op0, 0);
2934 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2935 if (GET_MODE (op1) != mode2)
2936 op1 = convert_to_mode (mode2, op1, 0);
2938 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2939 target, LCT_CONST, mode,
2940 op0, mode, op1, mode2);
2942 return target;
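/* For example, __builtin_powi (x, n) with double X expands to a call
   to libgcc's __powidf2 (assuming the default powi_optab libfunc
   names; the implementations live in libgcc2.c).  */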
2945 /* Expand expression EXP which is a call to the strlen builtin. Return
2946 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2947 try to get the result in TARGET, if convenient. */
2949 static rtx
2950 expand_builtin_strlen (tree exp, rtx target,
2951 machine_mode target_mode)
2953 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2954 return NULL_RTX;
2956 class expand_operand ops[4];
2957 rtx pat;
2958 tree len;
2959 tree src = CALL_EXPR_ARG (exp, 0);
2960 rtx src_reg;
2961 rtx_insn *before_strlen;
2962 machine_mode insn_mode;
2963 enum insn_code icode = CODE_FOR_nothing;
2964 unsigned int align;
2966 /* If the length can be computed at compile-time, return it. */
2967 len = c_strlen (src, 0);
2968 if (len)
2969 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2971 /* If the length can be computed at compile-time and is constant
2972 integer, but there are side-effects in src, evaluate
2973 src for side-effects, then return len.
2974 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2975 can be optimized into: i++; x = 3; */
2976 len = c_strlen (src, 1);
2977 if (len && TREE_CODE (len) == INTEGER_CST)
2979 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2980 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2985 /* If SRC is not a pointer type, don't do this operation inline. */
2986 if (align == 0)
2987 return NULL_RTX;
2989 /* Bail out if we can't compute strlen in the right mode. */
2990 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2992 icode = optab_handler (strlen_optab, insn_mode);
2993 if (icode != CODE_FOR_nothing)
2994 break;
2996 if (insn_mode == VOIDmode)
2997 return NULL_RTX;
2999 /* Make a place to hold the source address. We will not expand
3000 the actual source until we are sure that the expansion will
3001 not fail -- there are trees that cannot be expanded twice. */
3002 src_reg = gen_reg_rtx (Pmode);
3004 /* Mark the beginning of the strlen sequence so we can emit the
3005 source operand later. */
3006 before_strlen = get_last_insn ();
3008 create_output_operand (&ops[0], target, insn_mode);
3009 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3010 create_integer_operand (&ops[2], 0);
3011 create_integer_operand (&ops[3], align);
3012 if (!maybe_expand_insn (icode, 4, ops))
3013 return NULL_RTX;
3015 /* Check to see if the argument was declared attribute nonstring
3016 and if so, issue a warning since at this point it's not known
3017 to be nul-terminated. */
3018 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3020 /* Now that we are assured of success, expand the source. */
3021 start_sequence ();
3022 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3023 if (pat != src_reg)
3025 #ifdef POINTERS_EXTEND_UNSIGNED
3026 if (GET_MODE (pat) != Pmode)
3027 pat = convert_to_mode (Pmode, pat,
3028 POINTERS_EXTEND_UNSIGNED);
3029 #endif
3030 emit_move_insn (src_reg, pat);
3032 pat = get_insns ();
3033 end_sequence ();
3035 if (before_strlen)
3036 emit_insn_after (pat, before_strlen);
3037 else
3038 emit_insn_before (pat, get_insns ());
3040 /* Return the value in the proper mode for this function. */
3041 if (GET_MODE (ops[0].value) == target_mode)
3042 target = ops[0].value;
3043 else if (target != 0)
3044 convert_move (target, ops[0].value, 0);
3045 else
3046 target = convert_to_mode (target_mode, ops[0].value, 0);
3048 return target;
3051 /* Expand call EXP to the strnlen built-in, returning the result
3052 in TARGET if convenient; otherwise return NULL_RTX on failure. */
3054 static rtx
3055 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3057 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3058 return NULL_RTX;
3060 tree src = CALL_EXPR_ARG (exp, 0);
3061 tree bound = CALL_EXPR_ARG (exp, 1);
3063 if (!bound)
3064 return NULL_RTX;
3066 location_t loc = UNKNOWN_LOCATION;
3067 if (EXPR_HAS_LOCATION (exp))
3068 loc = EXPR_LOCATION (exp);
3070 tree maxobjsize = max_object_size ();
3071 tree func = get_callee_fndecl (exp);
3073 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3074 so these conversions aren't necessary. */
3075 c_strlen_data lendata = { };
3076 tree len = c_strlen (src, 0, &lendata, 1);
3077 if (len)
3078 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3080 if (TREE_CODE (bound) == INTEGER_CST)
3082 if (!TREE_NO_WARNING (exp)
3083 && tree_int_cst_lt (maxobjsize, bound)
3084 && warning_at (loc, OPT_Wstringop_overflow_,
3085 "%K%qD specified bound %E "
3086 "exceeds maximum object size %E",
3087 exp, func, bound, maxobjsize))
3088 TREE_NO_WARNING (exp) = true;
3090 bool exact = true;
3091 if (!len || TREE_CODE (len) != INTEGER_CST)
3093 /* Clear EXACT if LEN may be less than SRC suggests,
3094 such as in
3095 strnlen (&a[i], sizeof a)
3096 where the value of i is unknown. Unless i's value is
3097 zero, the call is unsafe because the bound is greater. */
3098 lendata.decl = unterminated_array (src, &len, &exact);
3099 if (!lendata.decl)
3100 return NULL_RTX;
3103 if (lendata.decl
3104 && !TREE_NO_WARNING (exp)
3105 && ((tree_int_cst_lt (len, bound))
3106 || !exact))
3108 location_t warnloc
3109 = expansion_point_location_if_in_system_header (loc);
3111 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3112 exact
3113 ? G_("%K%qD specified bound %E exceeds the size %E "
3114 "of unterminated array")
3115 : G_("%K%qD specified bound %E may exceed the size "
3116 "of at most %E of unterminated array"),
3117 exp, func, bound, len))
3119 inform (DECL_SOURCE_LOCATION (lendata.decl),
3120 "referenced argument declared here");
3121 TREE_NO_WARNING (exp) = true;
3122 return NULL_RTX;
3126 if (!len)
3127 return NULL_RTX;
3129 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3130 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3133 if (TREE_CODE (bound) != SSA_NAME)
3134 return NULL_RTX;
3136 wide_int min, max;
3137 enum value_range_kind rng = get_range_info (bound, &min, &max);
3138 if (rng != VR_RANGE)
3139 return NULL_RTX;
3141 if (!TREE_NO_WARNING (exp)
3142 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3143 && warning_at (loc, OPT_Wstringop_overflow_,
3144 "%K%qD specified bound [%wu, %wu] "
3145 "exceeds maximum object size %E",
3146 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3147 TREE_NO_WARNING (exp) = true;
3149 bool exact = true;
3150 if (!len || TREE_CODE (len) != INTEGER_CST)
3152 lendata.decl = unterminated_array (src, &len, &exact);
3153 if (!lendata.decl)
3154 return NULL_RTX;
3157 if (lendata.decl
3158 && !TREE_NO_WARNING (exp)
3159 && (wi::ltu_p (wi::to_wide (len), min)
3160 || !exact))
3162 location_t warnloc
3163 = expansion_point_location_if_in_system_header (loc);
3165 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3166 exact
3167 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3168 "the size %E of unterminated array")
3169 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3170 "the size of at most %E of unterminated array"),
3171 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3173 inform (DECL_SOURCE_LOCATION (lendata.decl),
3174 "referenced argument declared here");
3175 TREE_NO_WARNING (exp) = true;
3179 if (lendata.decl)
3180 return NULL_RTX;
3182 if (wi::gtu_p (min, wi::to_wide (len)))
3183 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3185 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3186 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
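/* A sketch of the situation diagnosed above:

       char a[4] = "1234";     -- no terminating nul
       n = strnlen (a, 8);

   The bound 8 exceeds the size 4 of the unterminated array, so after
   warning, the expansion is abandoned (NULL_RTX) and an ordinary
   library call is emitted instead.  */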
3189 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3190 bytes from constant string DATA + OFFSET and return it as target
3191 constant. */
3193 static rtx
3194 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3195 scalar_int_mode mode)
3197 const char *str = (const char *) data;
3199 gcc_assert (offset >= 0
3200 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3201 <= strlen (str) + 1));
3203 return c_readstr (str + offset, mode);
3206 /* LEN specifies the length of the block for the memcpy/memset operation.
3207 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3208 In some cases we can make a very likely guess about the max size, which
3209 we then put into PROBABLE_MAX_SIZE. */
3211 static void
3212 determine_block_size (tree len, rtx len_rtx,
3213 unsigned HOST_WIDE_INT *min_size,
3214 unsigned HOST_WIDE_INT *max_size,
3215 unsigned HOST_WIDE_INT *probable_max_size)
3217 if (CONST_INT_P (len_rtx))
3219 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3220 return;
3222 else
3224 wide_int min, max;
3225 enum value_range_kind range_type = VR_UNDEFINED;
3227 /* Determine bounds from the type. */
3228 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3229 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3230 else
3231 *min_size = 0;
3232 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3233 *probable_max_size = *max_size
3234 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3235 else
3236 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3238 if (TREE_CODE (len) == SSA_NAME)
3239 range_type = get_range_info (len, &min, &max);
3240 if (range_type == VR_RANGE)
3242 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3243 *min_size = min.to_uhwi ();
3244 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3245 *probable_max_size = *max_size = max.to_uhwi ();
3247 else if (range_type == VR_ANTI_RANGE)
3249 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3250 if (min == 0)
3252 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3253 *min_size = max.to_uhwi () + 1;
3255 /* Code like
3257 int n;
3258 if (n < 100)
3259 memcpy (a, b, n)
3261 produces an anti-range allowing negative values of N. We can still
3262 use that information to guess that N is not negative. */
3264 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3265 *probable_max_size = min.to_uhwi () - 1;
3268 gcc_checking_assert (*max_size <=
3269 (unsigned HOST_WIDE_INT)
3270 GET_MODE_MASK (GET_MODE (len_rtx)));
3273 /* Try to verify that the sizes and lengths of the arguments to a string
3274 manipulation function given by EXP are within valid bounds and that
3275 the operation does not lead to buffer overflow or read past the end.
3276 Arguments other than EXP may be null. When non-null, the arguments
3277 have the following meaning:
3278 DST is the destination of a copy call or NULL otherwise.
3279 SRC is the source of a copy call or NULL otherwise.
3280 DSTWRITE is the number of bytes written into the destination obtained
3281 from the user-supplied size argument to the function (such as in
3282 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3283 MAXREAD is the user-supplied bound on the length of the source sequence
3284 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3285 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3286 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3287 expression EXP is a string function call (as opposed to a memory call
3288 like memcpy). As an exception, SRCSTR can also be an integer denoting
3289 the precomputed size of the source string or object (for functions like
3290 memcpy).
3291 DSTSIZE is the size of the destination object specified by the last
3292 argument to the _chk builtins, typically resulting from the expansion
3293 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3294 DSTSIZE).
3296 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3297 SIZE_MAX.
3299 If the call is successfully verified as safe return true, otherwise
3300 return false. */
3302 static bool
3303 check_access (tree exp, tree, tree, tree dstwrite,
3304 tree maxread, tree srcstr, tree dstsize)
3306 int opt = OPT_Wstringop_overflow_;
3308 /* The size of the largest object is half the address space, or
3309 PTRDIFF_MAX. (This is way too permissive.) */
3310 tree maxobjsize = max_object_size ();
3312 /* Either the length of the source string for string functions or
3313 the size of the source object for raw memory functions. */
3314 tree slen = NULL_TREE;
3316 tree range[2] = { NULL_TREE, NULL_TREE };
3318 /* Set to true when the exact number of bytes written by a string
3319 function like strcpy is not known and the only thing that is
3320 known is that it must be at least one (for the terminating nul). */
3321 bool at_least_one = false;
3322 if (srcstr)
3324 /* SRCSTR is normally a pointer to string but as a special case
3325 it can be an integer denoting the length of a string. */
3326 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3328 /* Try to determine the range of lengths the source string
3329 refers to. If it can be determined and is less than
3330 the upper bound given by MAXREAD add one to it for
3331 the terminating nul. Otherwise, set it to one for
3332 the same reason, or to MAXREAD as appropriate. */
3333 c_strlen_data lendata = { };
3334 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3335 range[0] = lendata.minlen;
3336 range[1] = lendata.maxbound;
3337 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3339 if (maxread && tree_int_cst_le (maxread, range[0]))
3340 range[0] = range[1] = maxread;
3341 else
3342 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3343 range[0], size_one_node);
3345 if (maxread && tree_int_cst_le (maxread, range[1]))
3346 range[1] = maxread;
3347 else if (!integer_all_onesp (range[1]))
3348 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3349 range[1], size_one_node);
3351 slen = range[0];
3353 else
3355 at_least_one = true;
3356 slen = size_one_node;
3359 else
3360 slen = srcstr;
3363 if (!dstwrite && !maxread)
3365 /* When the only available piece of data is the object size
3366 there is nothing to do. */
3367 if (!slen)
3368 return true;
3370 /* Otherwise, when the length of the source sequence is known
3371 (as with strlen), set DSTWRITE to it. */
3372 if (!range[0])
3373 dstwrite = slen;
3376 if (!dstsize)
3377 dstsize = maxobjsize;
3379 if (dstwrite)
3380 get_size_range (dstwrite, range);
3382 tree func = get_callee_fndecl (exp);
3384 /* First check the number of bytes to be written against the maximum
3385 object size. */
3386 if (range[0]
3387 && TREE_CODE (range[0]) == INTEGER_CST
3388 && tree_int_cst_lt (maxobjsize, range[0]))
3390 if (TREE_NO_WARNING (exp))
3391 return false;
3393 location_t loc = tree_nonartificial_location (exp);
3394 loc = expansion_point_location_if_in_system_header (loc);
3396 bool warned;
3397 if (range[0] == range[1])
3398 warned = warning_at (loc, opt,
3399 "%K%qD specified size %E "
3400 "exceeds maximum object size %E",
3401 exp, func, range[0], maxobjsize);
3402 else
3403 warned = warning_at (loc, opt,
3404 "%K%qD specified size between %E and %E "
3405 "exceeds maximum object size %E",
3406 exp, func,
3407 range[0], range[1], maxobjsize);
3408 if (warned)
3409 TREE_NO_WARNING (exp) = true;
3411 return false;
3414 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3415 constant, and in range of unsigned HOST_WIDE_INT. */
3416 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3418 /* Next check the number of bytes to be written against the destination
3419 object size. */
3420 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3422 if (range[0]
3423 && TREE_CODE (range[0]) == INTEGER_CST
3424 && ((tree_fits_uhwi_p (dstsize)
3425 && tree_int_cst_lt (dstsize, range[0]))
3426 || (dstwrite
3427 && tree_fits_uhwi_p (dstwrite)
3428 && tree_int_cst_lt (dstwrite, range[0]))))
3430 if (TREE_NO_WARNING (exp))
3431 return false;
3433 location_t loc = tree_nonartificial_location (exp);
3434 loc = expansion_point_location_if_in_system_header (loc);
3436 if (dstwrite == slen && at_least_one)
3438 /* This is a call to strcpy with a destination of 0 size
3439 and a source of unknown length. The call will write
3440 at least one byte past the end of the destination. */
3441 warning_at (loc, opt,
3442 "%K%qD writing %E or more bytes into a region "
3443 "of size %E overflows the destination",
3444 exp, func, range[0], dstsize);
3446 else if (tree_int_cst_equal (range[0], range[1]))
3447 warning_n (loc, opt, tree_to_uhwi (range[0]),
3448 "%K%qD writing %E byte into a region "
3449 "of size %E overflows the destination",
3450 "%K%qD writing %E bytes into a region "
3451 "of size %E overflows the destination",
3452 exp, func, range[0], dstsize);
3453 else if (tree_int_cst_sign_bit (range[1]))
3455 /* Avoid printing the upper bound if it's invalid. */
3456 warning_at (loc, opt,
3457 "%K%qD writing %E or more bytes into a region "
3458 "of size %E overflows the destination",
3459 exp, func, range[0], dstsize);
3461 else
3462 warning_at (loc, opt,
3463 "%K%qD writing between %E and %E bytes into "
3464 "a region of size %E overflows the destination",
3465 exp, func, range[0], range[1],
3466 dstsize);
3468 /* Return error when an overflow has been detected. */
3469 return false;
3473 /* Check the maximum length of the source sequence against the size
3474 of the destination object if known, or against the maximum size
3475 of an object. */
3476 if (maxread)
3478 get_size_range (maxread, range);
3479 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3481 location_t loc = tree_nonartificial_location (exp);
3482 loc = expansion_point_location_if_in_system_header (loc);
3484 if (tree_int_cst_lt (maxobjsize, range[0]))
3486 if (TREE_NO_WARNING (exp))
3487 return false;
3489 /* Warn about crazy big sizes first since that's more
3490 likely to be meaningful than saying that the bound
3491 is greater than the object size if both are big. */
3492 if (range[0] == range[1])
3493 warning_at (loc, opt,
3494 "%K%qD specified bound %E "
3495 "exceeds maximum object size %E",
3496 exp, func,
3497 range[0], maxobjsize);
3498 else
3499 warning_at (loc, opt,
3500 "%K%qD specified bound between %E and %E "
3501 "exceeds maximum object size %E",
3502 exp, func,
3503 range[0], range[1], maxobjsize);
3505 return false;
3508 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3510 if (TREE_NO_WARNING (exp))
3511 return false;
3513 if (tree_int_cst_equal (range[0], range[1]))
3514 warning_at (loc, opt,
3515 "%K%qD specified bound %E "
3516 "exceeds destination size %E",
3517 exp, func,
3518 range[0], dstsize);
3519 else
3520 warning_at (loc, opt,
3521 "%K%qD specified bound between %E and %E "
3522 "exceeds destination size %E",
3523 exp, func,
3524 range[0], range[1], dstsize);
3525 return false;
3530 /* Check for reading past the end of SRC. */
3531 if (slen
3532 && slen == srcstr
3533 && dstwrite && range[0]
3534 && tree_int_cst_lt (slen, range[0]))
3536 if (TREE_NO_WARNING (exp))
3537 return false;
3539 location_t loc = tree_nonartificial_location (exp);
3541 if (tree_int_cst_equal (range[0], range[1]))
3542 warning_n (loc, opt, tree_to_uhwi (range[0]),
3543 "%K%qD reading %E byte from a region of size %E",
3544 "%K%qD reading %E bytes from a region of size %E",
3545 exp, func, range[0], slen);
3546 else if (tree_int_cst_sign_bit (range[1]))
3548 /* Avoid printing the upper bound if it's invalid. */
3549 warning_at (loc, opt,
3550 "%K%qD reading %E or more bytes from a region "
3551 "of size %E",
3552 exp, func, range[0], slen);
3554 else
3555 warning_at (loc, opt,
3556 "%K%qD reading between %E and %E bytes from a region "
3557 "of size %E",
3558 exp, func, range[0], range[1], slen);
3559 return false;
3562 return true;
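/* Illustration (editorial sketch, not part of the original source):
   compiled with -Wstringop-overflow, user code such as

     #include <string.h>
     char d[4];
     void f (void) { memcpy (d, "toolong!", 8); }

   reaches the checks above with DSTWRITE = 8 and DSTSIZE = 4, so
   range[0] exceeds the destination size, the "writing 8 bytes into
   a region of size 4" warning fires, and check_access returns
   false.  */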
3565 /* Determines the size of the member referenced by the COMPONENT_REF
3566 REF, using its initializer expression if necessary in order to
3567 determine the size of an initialized flexible array member.
3568 Returns the size (which might be zero for an object with
3569 an uninitialized flexible array member) or null if the size
3570 cannot be determined. */
3572 static tree
3573 component_size (tree ref)
3575 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
3577 tree member = TREE_OPERAND (ref, 1);
3579 /* If the member is not last or has a size greater than one, return
3580 it. Otherwise it's either a flexible array member or a zero-length
3581 array member, or an array of length one treated as such. */
3582 tree size = DECL_SIZE_UNIT (member);
3583 if (size
3584 && (!array_at_struct_end_p (ref)
3585 || (!integer_zerop (size)
3586 && !integer_onep (size))))
3587 return size;
3589 /* If the reference is to a declared object and the member a true
3590 flexible array, try to determine its size from its initializer. */
3591 poly_int64 off = 0;
3592 tree base = get_addr_base_and_unit_offset (ref, &off);
3593 if (!base || !VAR_P (base))
3594 return NULL_TREE;
3596 /* The size of any member of a declared object other than a flexible
3597 array member is that obtained above. */
3598 if (size)
3599 return size;
3601 if (tree init = DECL_INITIAL (base))
3602 if (TREE_CODE (init) == CONSTRUCTOR)
3604 off <<= LOG2_BITS_PER_UNIT;
3605 init = fold_ctor_reference (NULL_TREE, init, off, 0, base);
3606 if (init)
3607 return TYPE_SIZE_UNIT (TREE_TYPE (init));
3610 return DECL_EXTERNAL (base) ? NULL_TREE : integer_zero_node;
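/* Illustration (editorial sketch): for a declared object whose last
   member is a true flexible array initialized in place, e.g.

     struct S { int n; char a[]; };
     static struct S s = { 1, "ab" };

   the member has no DECL_SIZE_UNIT, so the size of s.a is recovered
   from the CONSTRUCTOR via fold_ctor_reference, here 3 bytes
   (including the NUL); an extern declaration with no initializer
   yields NULL_TREE instead.  */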
3613 /* Helper to compute the size of the object referenced by the DEST
3614 expression which must have pointer type, using Object Size type
3615 OSTYPE (only the least significant 2 bits are used). Return
3616 an estimate of the size of the object if successful or NULL when
3617 the size cannot be determined. When the referenced object involves
3618 a non-constant offset in some range the returned value represents
3619 the largest size given the smallest non-negative offset in the
3620 range. If nonnull, set *PDECL to the decl of the referenced
3621 subobject if it can be determined, or to null otherwise.
3622 The function is intended for diagnostics and should not be used
3623 to influence code generation or optimization. */
3625 tree
3626 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */)
3628 tree dummy = NULL_TREE;
3629 if (!pdecl)
3630 pdecl = &dummy;
3632 unsigned HOST_WIDE_INT size;
3634 /* Only the two least significant bits are meaningful. */
3635 ostype &= 3;
3637 if (compute_builtin_object_size (dest, ostype, &size))
3638 return build_int_cst (sizetype, size);
3640 if (TREE_CODE (dest) == SSA_NAME)
3642 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3643 if (!is_gimple_assign (stmt))
3644 return NULL_TREE;
3646 dest = gimple_assign_rhs1 (stmt);
3648 tree_code code = gimple_assign_rhs_code (stmt);
3649 if (code == POINTER_PLUS_EXPR)
3651 /* compute_builtin_object_size fails for addresses with
3652 non-constant offsets. Try to determine the range of
3653 such an offset here and use it to adjust the constant
3654 size. */
3655 tree off = gimple_assign_rhs2 (stmt);
3656 if (TREE_CODE (off) == INTEGER_CST)
3658 if (tree size = compute_objsize (dest, ostype, pdecl))
3660 wide_int wioff = wi::to_wide (off);
3661 wide_int wisiz = wi::to_wide (size);
3663 /* Ignore negative offsets for now. For others,
3664 use the lower bound as the most optimistic
3665 estimate of the (remaining) size. */
3666 if (wi::sign_mask (wioff))
3668 else if (wi::ltu_p (wioff, wisiz))
3669 return wide_int_to_tree (TREE_TYPE (size),
3670 wi::sub (wisiz, wioff));
3671 else
3672 return size_zero_node;
3675 else if (TREE_CODE (off) == SSA_NAME
3676 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3678 wide_int min, max;
3679 enum value_range_kind rng = get_range_info (off, &min, &max);
3681 if (rng == VR_RANGE)
3683 if (tree size = compute_objsize (dest, ostype, pdecl))
3685 wide_int wisiz = wi::to_wide (size);
3687 /* Ignore negative offsets for now. For others,
3688 use the lower bound as the most optimistic
3689 estimate of the (remaining) size. */
3690 if (wi::sign_mask (min)
3691 || wi::sign_mask (max))
3693 else if (wi::ltu_p (min, wisiz))
3694 return wide_int_to_tree (TREE_TYPE (size),
3695 wi::sub (wisiz, min));
3696 else
3697 return size_zero_node;
3702 else if (code != ADDR_EXPR)
3703 return NULL_TREE;
3706 /* Unless computing the largest size (for memcpy and other raw memory
3707 functions), try to determine the size of the object from its type. */
3708 if (!ostype)
3709 return NULL_TREE;
3711 if (TREE_CODE (dest) == ARRAY_REF
3712 || TREE_CODE (dest) == MEM_REF)
3714 tree ref = TREE_OPERAND (dest, 0);
3715 tree off = TREE_OPERAND (dest, 1);
3716 if (tree size = compute_objsize (ref, ostype, pdecl))
3718 /* If the declaration of the destination object is known
3719 to have zero size, return zero. */
3720 if (integer_zerop (size))
3721 return integer_zero_node;
3723 if (TREE_CODE (off) != INTEGER_CST
3724 || TREE_CODE (size) != INTEGER_CST)
3725 return NULL_TREE;
3727 if (TREE_CODE (dest) == ARRAY_REF)
3729 tree eltype = TREE_TYPE (dest);
3730 if (tree tpsize = TYPE_SIZE_UNIT (eltype))
3731 off = fold_build2 (MULT_EXPR, size_type_node, off, tpsize);
3732 else
3733 return NULL_TREE;
3736 if (tree_int_cst_lt (off, size))
3737 return fold_build2 (MINUS_EXPR, size_type_node, size, off);
3738 return integer_zero_node;
3741 return NULL_TREE;
3744 if (TREE_CODE (dest) == COMPONENT_REF)
3746 *pdecl = TREE_OPERAND (dest, 1);
3747 return component_size (dest);
3750 if (TREE_CODE (dest) != ADDR_EXPR)
3751 return NULL_TREE;
3753 tree ref = TREE_OPERAND (dest, 0);
3754 if (DECL_P (ref))
3756 *pdecl = ref;
3757 return DECL_SIZE_UNIT (ref);
3760 tree type = TREE_TYPE (dest);
3761 if (TREE_CODE (type) == POINTER_TYPE)
3762 type = TREE_TYPE (type);
3764 type = TYPE_MAIN_VARIANT (type);
3766 if (TREE_CODE (type) == ARRAY_TYPE
3767 && !array_at_struct_end_p (ref))
3769 if (tree size = TYPE_SIZE_UNIT (type))
3770 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3773 return NULL_TREE;
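/* Illustration (editorial sketch): for

     char a[8];
     char *p = a + 3;

   the SSA definition of p is a POINTER_PLUS_EXPR with a constant
   offset, so the recursion above yields 8 - 3 = 5 remaining bytes;
   a variable offset known to lie in [2, 6] would use its lower
   bound and yield 6, while negative offsets are ignored for now as
   the comments note.  */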
3776 /* Helper to determine and check the sizes of the source and the destination
3777 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3778 call expression, DEST is the destination argument, SRC is the source
3779 argument or null, and LEN is the number of bytes. Use Object Size type-0
3780 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3781 (no overflow or invalid sizes), false otherwise. */
3783 static bool
3784 check_memop_access (tree exp, tree dest, tree src, tree size)
3786 /* For functions like memset and memcpy that operate on raw memory
3787 try to determine the size of the largest source and destination
3788 object using type-0 Object Size regardless of the object size
3789 type specified by the option. */
3790 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3791 tree dstsize = compute_objsize (dest, 0);
3793 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3794 srcsize, dstsize);
3797 /* Validate memchr arguments without performing any expansion.
3798 Return NULL_RTX. */
3800 static rtx
3801 expand_builtin_memchr (tree exp, rtx)
3803 if (!validate_arglist (exp,
3804 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3805 return NULL_RTX;
3807 tree arg1 = CALL_EXPR_ARG (exp, 0);
3808 tree len = CALL_EXPR_ARG (exp, 2);
3810 /* Diagnose calls where the specified length exceeds the size
3811 of the object. */
3812 if (warn_stringop_overflow)
3814 tree size = compute_objsize (arg1, 0);
3815 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3816 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3819 return NULL_RTX;
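/* Illustration (editorial sketch): with -Wstringop-overflow,

     char buf[4];
     void *q = memchr (buf, 0, 16);

   is diagnosed above as reading 16 bytes from a region of size 4;
   since NULL_RTX is returned unconditionally, the call itself is
   still emitted as a normal library call.  */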
3822 /* Expand a call EXP to the memcpy builtin.
3823 Return NULL_RTX if we failed; the caller should emit a normal call,
3824 otherwise try to get the result in TARGET, if convenient (and in
3825 mode MODE if that's convenient). */
3827 static rtx
3828 expand_builtin_memcpy (tree exp, rtx target)
3830 if (!validate_arglist (exp,
3831 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3832 return NULL_RTX;
3834 tree dest = CALL_EXPR_ARG (exp, 0);
3835 tree src = CALL_EXPR_ARG (exp, 1);
3836 tree len = CALL_EXPR_ARG (exp, 2);
3838 check_memop_access (exp, dest, src, len);
3840 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3841 /*retmode=*/ RETURN_BEGIN);
3844 /* Check a call EXP to the memmove built-in for validity.
3845 Return NULL_RTX on both success and failure. */
3847 static rtx
3848 expand_builtin_memmove (tree exp, rtx)
3850 if (!validate_arglist (exp,
3851 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3852 return NULL_RTX;
3854 tree dest = CALL_EXPR_ARG (exp, 0);
3855 tree src = CALL_EXPR_ARG (exp, 1);
3856 tree len = CALL_EXPR_ARG (exp, 2);
3858 check_memop_access (exp, dest, src, len);
3860 return NULL_RTX;
3863 /* Expand a call EXP to the mempcpy builtin.
3864 Return NULL_RTX if we failed; the caller should emit a normal call,
3865 otherwise try to get the result in TARGET, if convenient (and in
3866 mode MODE if that's convenient). */
3868 static rtx
3869 expand_builtin_mempcpy (tree exp, rtx target)
3871 if (!validate_arglist (exp,
3872 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3873 return NULL_RTX;
3875 tree dest = CALL_EXPR_ARG (exp, 0);
3876 tree src = CALL_EXPR_ARG (exp, 1);
3877 tree len = CALL_EXPR_ARG (exp, 2);
3879 /* Policy does not generally allow using compute_objsize (which
3880 is used internally by check_memop_access) to change code generation
3881 or drive optimization decisions.
3883 In this instance it is safe because the code we generate has
3884 the same semantics regardless of the return value of
3885 check_memop_access. Exactly the same amount of data is copied
3886 and the return value is exactly the same in both cases.
3888 Furthermore, check_memop_access always uses mode 0 for the call to
3889 compute_objsize, so the imprecise nature of compute_objsize is
3890 avoided. */
3892 /* Avoid expanding mempcpy into memcpy when the call is determined
3893 to overflow the buffer. This also prevents the same overflow
3894 from being diagnosed again when expanding memcpy. */
3895 if (!check_memop_access (exp, dest, src, len))
3896 return NULL_RTX;
3898 return expand_builtin_mempcpy_args (dest, src, len,
3899 target, exp, /*retmode=*/ RETURN_END);
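/* Illustration (editorial sketch): mempcpy differs from memcpy only
   in its return value, e.g.

     char d[8];
     char *end = mempcpy (d, "abc", 3);

   leaves end == d + 3, hence RETURN_END here: the expansion yields
   DEST + LEN rather than DEST.  */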
3902 /* Helper function to do the actual work for expand of memory copy family
3903 functions (memcpy, mempcpy, stpcpy). The expansion copies LEN bytes
3904 of memory from SRC to DEST and assigns the result to TARGET if convenient. Return
3905 value is based on RETMODE argument. */
3907 static rtx
3908 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3909 rtx target, tree exp, memop_ret retmode)
3911 const char *src_str;
3912 unsigned int src_align = get_pointer_alignment (src);
3913 unsigned int dest_align = get_pointer_alignment (dest);
3914 rtx dest_mem, src_mem, dest_addr, len_rtx;
3915 HOST_WIDE_INT expected_size = -1;
3916 unsigned int expected_align = 0;
3917 unsigned HOST_WIDE_INT min_size;
3918 unsigned HOST_WIDE_INT max_size;
3919 unsigned HOST_WIDE_INT probable_max_size;
3921 bool is_move_done;
3923 /* If DEST is not a pointer type, call the normal function. */
3924 if (dest_align == 0)
3925 return NULL_RTX;
3927 /* If either SRC is not a pointer type, don't do this
3928 operation in-line. */
3929 if (src_align == 0)
3930 return NULL_RTX;
3932 if (currently_expanding_gimple_stmt)
3933 stringop_block_profile (currently_expanding_gimple_stmt,
3934 &expected_align, &expected_size);
3936 if (expected_align < dest_align)
3937 expected_align = dest_align;
3938 dest_mem = get_memory_rtx (dest, len);
3939 set_mem_align (dest_mem, dest_align);
3940 len_rtx = expand_normal (len);
3941 determine_block_size (len, len_rtx, &min_size, &max_size,
3942 &probable_max_size);
3943 src_str = c_getstr (src);
3945 /* If SRC is a string constant and block move would be done
3946 by pieces, we can avoid loading the string from memory
3947 and only store the computed constants. */
3948 if (src_str
3949 && CONST_INT_P (len_rtx)
3950 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3951 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3952 CONST_CAST (char *, src_str),
3953 dest_align, false))
3955 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3956 builtin_memcpy_read_str,
3957 CONST_CAST (char *, src_str),
3958 dest_align, false, retmode);
3959 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3960 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3961 return dest_mem;
3964 src_mem = get_memory_rtx (src, len);
3965 set_mem_align (src_mem, src_align);
3967 /* Copy word part most expediently. */
3968 enum block_op_methods method = BLOCK_OP_NORMAL;
3969 if (CALL_EXPR_TAILCALL (exp)
3970 && (retmode == RETURN_BEGIN || target == const0_rtx))
3971 method = BLOCK_OP_TAILCALL;
3972 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3973 && retmode == RETURN_END
3974 && target != const0_rtx);
3975 if (use_mempcpy_call)
3976 method = BLOCK_OP_NO_LIBCALL_RET;
3977 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3978 expected_align, expected_size,
3979 min_size, max_size, probable_max_size,
3980 use_mempcpy_call, &is_move_done);
3982 /* Bail out when a mempcpy call would be expanded as libcall and when
3983 we have a target that provides a fast implementation
3984 of the mempcpy routine. */
3985 if (!is_move_done)
3986 return NULL_RTX;
3988 if (dest_addr == pc_rtx)
3989 return NULL_RTX;
3991 if (dest_addr == 0)
3993 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3994 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3997 if (retmode != RETURN_BEGIN && target != const0_rtx)
3999 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4000 /* stpcpy returns a pointer to the last byte, not one past it. */
4001 if (retmode == RETURN_END_MINUS_ONE)
4002 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4005 return dest_addr;
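/* Illustration (editorial sketch): the RETMODE values correspond to
   the user-visible return conventions; with n counting the NUL,

     memcpy (d, s, n)    returns d            (RETURN_BEGIN)
     mempcpy (d, s, n)   returns d + n        (RETURN_END)
     stpcpy (d, s)       returns d + n - 1    (RETURN_END_MINUS_ONE)

   the last pointing at the copied NUL terminator.  */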
4008 static rtx
4009 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4010 rtx target, tree orig_exp, memop_ret retmode)
4012 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4013 retmode);
4016 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4017 we failed, the caller should emit a normal call, otherwise try to
4018 get the result in TARGET, if convenient.
4019 Return value is based on RETMODE argument. */
4021 static rtx
4022 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4024 class expand_operand ops[3];
4025 rtx dest_mem;
4026 rtx src_mem;
4028 if (!targetm.have_movstr ())
4029 return NULL_RTX;
4031 dest_mem = get_memory_rtx (dest, NULL);
4032 src_mem = get_memory_rtx (src, NULL);
4033 if (retmode == RETURN_BEGIN)
4035 target = force_reg (Pmode, XEXP (dest_mem, 0));
4036 dest_mem = replace_equiv_address (dest_mem, target);
4039 create_output_operand (&ops[0],
4040 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4041 create_fixed_operand (&ops[1], dest_mem);
4042 create_fixed_operand (&ops[2], src_mem);
4043 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4044 return NULL_RTX;
4046 if (retmode != RETURN_BEGIN && target != const0_rtx)
4048 target = ops[0].value;
4049 /* movstr is supposed to set end to the address of the NUL
4050 terminator. If the caller requested a mempcpy-like return value,
4051 adjust it. */
4052 if (retmode == RETURN_END)
4054 rtx tem = plus_constant (GET_MODE (target),
4055 gen_lowpart (GET_MODE (target), target), 1);
4056 emit_move_insn (target, force_operand (tem, NULL_RTX));
4059 return target;
4062 /* Do some very basic size validation of a call to the strcat builtin
4063 given by EXP. Return NULL_RTX to have the built-in expand to a call
4064 to the library function. */
4066 static rtx
4067 expand_builtin_strcat (tree exp, rtx)
4069 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4070 || !warn_stringop_overflow)
4071 return NULL_RTX;
4073 tree dest = CALL_EXPR_ARG (exp, 0);
4074 tree src = CALL_EXPR_ARG (exp, 1);
4076 /* There is no way here to determine the length of the string in
4077 the destination to which the SRC string is being appended, so
4078 just diagnose cases when the source string is longer than
4079 the destination object. */
4081 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4083 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4084 destsize);
4086 return NULL_RTX;
4089 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4090 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4091 try to get the result in TARGET, if convenient (and in mode MODE if that's
4092 convenient). */
4094 static rtx
4095 expand_builtin_strcpy (tree exp, rtx target)
4097 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4098 return NULL_RTX;
4100 tree dest = CALL_EXPR_ARG (exp, 0);
4101 tree src = CALL_EXPR_ARG (exp, 1);
4103 if (warn_stringop_overflow)
4105 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4106 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4107 src, destsize);
4110 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4112 /* Check to see if the argument was declared attribute nonstring
4113 and if so, issue a warning since at this point it's not known
4114 to be nul-terminated. */
4115 tree fndecl = get_callee_fndecl (exp);
4116 maybe_warn_nonstring_arg (fndecl, exp);
4117 return ret;
4120 return NULL_RTX;
4123 /* Helper function to do the actual work for expand_builtin_strcpy. The
4124 arguments to the builtin_strcpy call DEST and SRC are broken out
4125 so that this can also be called without constructing an actual CALL_EXPR.
4126 The other arguments and return value are the same as for
4127 expand_builtin_strcpy. */
4129 static rtx
4130 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4132 /* Detect strcpy calls with unterminated arrays. */
4133 if (tree nonstr = unterminated_array (src))
4135 /* NONSTR refers to the non-nul terminated constant array. */
4136 if (!TREE_NO_WARNING (exp))
4137 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4138 return NULL_RTX;
4141 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4144 /* Expand a call EXP to the stpcpy builtin.
4145 Return NULL_RTX if we failed; the caller should emit a normal call,
4146 otherwise try to get the result in TARGET, if convenient (and in
4147 mode MODE if that's convenient). */
4149 static rtx
4150 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4152 tree dst, src;
4153 location_t loc = EXPR_LOCATION (exp);
4155 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4156 return NULL_RTX;
4158 dst = CALL_EXPR_ARG (exp, 0);
4159 src = CALL_EXPR_ARG (exp, 1);
4161 if (warn_stringop_overflow)
4163 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4164 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4165 src, destsize);
4168 /* If return value is ignored, transform stpcpy into strcpy. */
4169 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4171 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4172 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4173 return expand_expr (result, target, mode, EXPAND_NORMAL);
4175 else
4177 tree len, lenp1;
4178 rtx ret;
4180 /* Ensure we get an actual string whose length can be evaluated at
4181 compile-time, not an expression containing a string. This is
4182 because the latter will potentially produce pessimized code
4183 when used to produce the return value. */
4184 c_strlen_data lendata = { };
4185 if (!c_getstr (src, NULL)
4186 || !(len = c_strlen (src, 0, &lendata, 1)))
4187 return expand_movstr (dst, src, target,
4188 /*retmode=*/ RETURN_END_MINUS_ONE);
4190 if (lendata.decl && !TREE_NO_WARNING (exp))
4191 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4193 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4194 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4195 target, exp,
4196 /*retmode=*/ RETURN_END_MINUS_ONE);
4198 if (ret)
4199 return ret;
4201 if (TREE_CODE (len) == INTEGER_CST)
4203 rtx len_rtx = expand_normal (len);
4205 if (CONST_INT_P (len_rtx))
4207 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4209 if (ret)
4211 if (! target)
4213 if (mode != VOIDmode)
4214 target = gen_reg_rtx (mode);
4215 else
4216 target = gen_reg_rtx (GET_MODE (ret));
4218 if (GET_MODE (target) != GET_MODE (ret))
4219 ret = gen_lowpart (GET_MODE (target), ret);
4221 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4222 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4223 gcc_assert (ret);
4225 return target;
4230 return expand_movstr (dst, src, target,
4231 /*retmode=*/ RETURN_END_MINUS_ONE);
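/* Illustration (editorial sketch): when the result of

     stpcpy (d, s);

   is unused, the code above rewrites the call as strcpy (d, s),
   avoiding the wasted end-pointer computation; with a constant-length
   source it is instead expanded like mempcpy (d, s, strlen (s) + 1)
   with the result adjusted back by one to point at the NUL.  */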
4235 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4236 arguments while being careful to avoid duplicate warnings (which could
4237 be issued if the expander were to expand the call, resulting in it
4238 being emitted in expand_call()). */
4240 static rtx
4241 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4243 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4245 /* The call has been successfully expanded. Check for nonstring
4246 arguments and issue warnings as appropriate. */
4247 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4248 return ret;
4251 return NULL_RTX;
4254 /* Check a call EXP to the stpncpy built-in for validity.
4255 Return NULL_RTX on both success and failure. */
4257 static rtx
4258 expand_builtin_stpncpy (tree exp, rtx)
4260 if (!validate_arglist (exp,
4261 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4262 || !warn_stringop_overflow)
4263 return NULL_RTX;
4265 /* The source and destination of the call. */
4266 tree dest = CALL_EXPR_ARG (exp, 0);
4267 tree src = CALL_EXPR_ARG (exp, 1);
4269 /* The exact number of bytes to write (not the maximum). */
4270 tree len = CALL_EXPR_ARG (exp, 2);
4272 /* The size of the destination object. */
4273 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4275 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4277 return NULL_RTX;
4280 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4281 bytes from constant string DATA + OFFSET and return it as target
4282 constant. */
4284 static rtx
4285 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4286 scalar_int_mode mode)
4288 const char *str = (const char *) data;
4290 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4291 return const0_rtx;
4293 return c_readstr (str + offset, mode);
4296 /* Helper to check the sizes of sequences and the destination of calls
4297 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4298 success (no overflow or invalid sizes), false otherwise. */
4300 static bool
4301 check_strncat_sizes (tree exp, tree objsize)
4303 tree dest = CALL_EXPR_ARG (exp, 0);
4304 tree src = CALL_EXPR_ARG (exp, 1);
4305 tree maxread = CALL_EXPR_ARG (exp, 2);
4307 /* Try to determine the range of lengths that the source expression
4308 refers to. */
4309 c_strlen_data lendata = { };
4310 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4312 /* Try to verify that the destination is big enough for the shortest
4313 string. */
4315 if (!objsize && warn_stringop_overflow)
4317 /* If it hasn't been provided by __strncat_chk, try to determine
4318 the size of the destination object into which the source is
4319 being copied. */
4320 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4323 /* Add one for the terminating nul. */
4324 tree srclen = (lendata.minlen
4325 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4326 size_one_node)
4327 : NULL_TREE);
4329 /* The strncat function copies at most MAXREAD bytes and always appends
4330 the terminating nul so the specified upper bound should never be equal
4331 to (or greater than) the size of the destination. */
4332 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4333 && tree_int_cst_equal (objsize, maxread))
4335 location_t loc = tree_nonartificial_location (exp);
4336 loc = expansion_point_location_if_in_system_header (loc);
4338 warning_at (loc, OPT_Wstringop_overflow_,
4339 "%K%qD specified bound %E equals destination size",
4340 exp, get_callee_fndecl (exp), maxread);
4342 return false;
4345 if (!srclen
4346 || (maxread && tree_fits_uhwi_p (maxread)
4347 && tree_fits_uhwi_p (srclen)
4348 && tree_int_cst_lt (maxread, srclen)))
4349 srclen = maxread;
4351 /* The number of bytes to write is LEN but check_access will also
4352 check SRCLEN if LEN's value isn't known. */
4353 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4354 objsize);
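/* Illustration (editorial sketch): the check above catches the
   common anti-pattern

     char d[8];
     strncat (d, s, sizeof d);

   because strncat copies up to MAXREAD bytes and then always appends
   a NUL, a bound equal to the destination size can write one byte
   too many even into an initially empty destination.  */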
4357 /* Similar to expand_builtin_strcat, do some very basic size validation
4358 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4359 the built-in expand to a call to the library function. */
4361 static rtx
4362 expand_builtin_strncat (tree exp, rtx)
4364 if (!validate_arglist (exp,
4365 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4366 || !warn_stringop_overflow)
4367 return NULL_RTX;
4369 tree dest = CALL_EXPR_ARG (exp, 0);
4370 tree src = CALL_EXPR_ARG (exp, 1);
4371 /* The upper bound on the number of bytes to write. */
4372 tree maxread = CALL_EXPR_ARG (exp, 2);
4373 /* The length of the source sequence. */
4374 tree slen = c_strlen (src, 1);
4376 /* Try to determine the range of lengths that the source expression
4377 refers to. Since the lengths are only used for warning and not
4378 for code generation disable strict mode below. */
4379 tree maxlen = slen;
4380 if (!maxlen)
4382 c_strlen_data lendata = { };
4383 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4384 maxlen = lendata.maxbound;
4387 /* Try to verify that the destination is big enough for the shortest
4388 string. First try to determine the size of the destination object
4389 into which the source is being copied. */
4390 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4392 /* Add one for the terminating nul. */
4393 tree srclen = (maxlen
4394 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4395 size_one_node)
4396 : NULL_TREE);
4398 /* The strncat function copies at most MAXREAD bytes and always appends
4399 the terminating nul so the specified upper bound should never be equal
4400 to (or greater than) the size of the destination. */
4401 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4402 && tree_int_cst_equal (destsize, maxread))
4404 location_t loc = tree_nonartificial_location (exp);
4405 loc = expansion_point_location_if_in_system_header (loc);
4407 warning_at (loc, OPT_Wstringop_overflow_,
4408 "%K%qD specified bound %E equals destination size",
4409 exp, get_callee_fndecl (exp), maxread);
4411 return NULL_RTX;
4414 if (!srclen
4415 || (maxread && tree_fits_uhwi_p (maxread)
4416 && tree_fits_uhwi_p (srclen)
4417 && tree_int_cst_lt (maxread, srclen)))
4418 srclen = maxread;
4420 /* The number of bytes to write is SRCLEN. */
4421 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4423 return NULL_RTX;
4426 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4427 NULL_RTX if we failed; the caller should emit a normal call. */
4429 static rtx
4430 expand_builtin_strncpy (tree exp, rtx target)
4432 location_t loc = EXPR_LOCATION (exp);
4434 if (validate_arglist (exp,
4435 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4437 tree dest = CALL_EXPR_ARG (exp, 0);
4438 tree src = CALL_EXPR_ARG (exp, 1);
4439 /* The number of bytes to write (not the maximum). */
4440 tree len = CALL_EXPR_ARG (exp, 2);
4441 /* The length of the source sequence. */
4442 tree slen = c_strlen (src, 1);
4444 if (warn_stringop_overflow)
4446 tree destsize = compute_objsize (dest,
4447 warn_stringop_overflow - 1);
4449 /* The number of bytes to write is LEN but check_access will also
4450 check SLEN if LEN's value isn't known. */
4451 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4452 destsize);
4455 /* We must be passed a constant len and src parameter. */
4456 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4457 return NULL_RTX;
4459 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4461 /* We're required to pad with trailing zeros if the requested
4462 len is greater than strlen(s2)+1. In that case try to
4463 use store_by_pieces; if that fails, punt. */
4464 if (tree_int_cst_lt (slen, len))
4466 unsigned int dest_align = get_pointer_alignment (dest);
4467 const char *p = c_getstr (src);
4468 rtx dest_mem;
4470 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4471 || !can_store_by_pieces (tree_to_uhwi (len),
4472 builtin_strncpy_read_str,
4473 CONST_CAST (char *, p),
4474 dest_align, false))
4475 return NULL_RTX;
4477 dest_mem = get_memory_rtx (dest, len);
4478 store_by_pieces (dest_mem, tree_to_uhwi (len),
4479 builtin_strncpy_read_str,
4480 CONST_CAST (char *, p), dest_align, false,
4481 RETURN_BEGIN);
4482 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4483 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4484 return dest_mem;
4487 return NULL_RTX;
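/* Illustration (editorial sketch): for a constant source and bound,

     char d[8];
     strncpy (d, "hi", sizeof d);

   must store "hi" plus six NULs of padding; since the bound exceeds
   strlen ("hi") + 1, the whole 8-byte store, padding included, can
   be emitted by pieces via builtin_strncpy_read_str, which returns
   zeros for offsets past the end of the string.  */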
4490 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4491 bytes from constant string DATA + OFFSET and return it as target
4492 constant. */
4494 static rtx
4495 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4496 scalar_int_mode mode)
4498 const char *c = (const char *) data;
4499 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4501 memset (p, *c, GET_MODE_SIZE (mode));
4503 return c_readstr (p, mode);
4506 /* Callback routine for store_by_pieces. Return the RTL of a register
4507 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4508 char value given in the RTL register data. For example, if mode is
4509 4 bytes wide, return the RTL for 0x01010101*data. */
4511 static rtx
4512 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4513 scalar_int_mode mode)
4515 rtx target, coeff;
4516 size_t size;
4517 char *p;
4519 size = GET_MODE_SIZE (mode);
4520 if (size == 1)
4521 return (rtx) data;
4523 p = XALLOCAVEC (char, size);
4524 memset (p, 1, size);
4525 coeff = c_readstr (p, mode);
4527 target = convert_to_mode (mode, (rtx) data, 1);
4528 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4529 return force_reg (mode, target);
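/* Illustration (editorial sketch): for a 4-byte mode the callback
   above effectively computes

     (value & 0xff) * 0x01010101

   so a runtime byte 0x5a becomes 0x5a5a5a5a, one copy of the byte in
   each position, ready to be stored word by word.  */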
4532 /* Expand expression EXP, which is a call to the memset builtin. Return
4533 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4534 try to get the result in TARGET, if convenient (and in mode MODE if that's
4535 convenient). */
4537 static rtx
4538 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4540 if (!validate_arglist (exp,
4541 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4542 return NULL_RTX;
4544 tree dest = CALL_EXPR_ARG (exp, 0);
4545 tree val = CALL_EXPR_ARG (exp, 1);
4546 tree len = CALL_EXPR_ARG (exp, 2);
4548 check_memop_access (exp, dest, NULL_TREE, len);
4550 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4553 /* Helper function to do the actual work for expand_builtin_memset. The
4554 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4555 so that this can also be called without constructing an actual CALL_EXPR.
4556 The other arguments and return value are the same as for
4557 expand_builtin_memset. */
4559 static rtx
4560 expand_builtin_memset_args (tree dest, tree val, tree len,
4561 rtx target, machine_mode mode, tree orig_exp)
4563 tree fndecl, fn;
4564 enum built_in_function fcode;
4565 machine_mode val_mode;
4566 char c;
4567 unsigned int dest_align;
4568 rtx dest_mem, dest_addr, len_rtx;
4569 HOST_WIDE_INT expected_size = -1;
4570 unsigned int expected_align = 0;
4571 unsigned HOST_WIDE_INT min_size;
4572 unsigned HOST_WIDE_INT max_size;
4573 unsigned HOST_WIDE_INT probable_max_size;
4575 dest_align = get_pointer_alignment (dest);
4577 /* If DEST is not a pointer type, don't do this operation in-line. */
4578 if (dest_align == 0)
4579 return NULL_RTX;
4581 if (currently_expanding_gimple_stmt)
4582 stringop_block_profile (currently_expanding_gimple_stmt,
4583 &expected_align, &expected_size);
4585 if (expected_align < dest_align)
4586 expected_align = dest_align;
4588 /* If the LEN parameter is zero, return DEST. */
4589 if (integer_zerop (len))
4591 /* Evaluate and ignore VAL in case it has side-effects. */
4592 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4593 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4596 /* Stabilize the arguments in case we fail. */
4597 dest = builtin_save_expr (dest);
4598 val = builtin_save_expr (val);
4599 len = builtin_save_expr (len);
4601 len_rtx = expand_normal (len);
4602 determine_block_size (len, len_rtx, &min_size, &max_size,
4603 &probable_max_size);
4604 dest_mem = get_memory_rtx (dest, len);
4605 val_mode = TYPE_MODE (unsigned_char_type_node);
4607 if (TREE_CODE (val) != INTEGER_CST)
4609 rtx val_rtx;
4611 val_rtx = expand_normal (val);
4612 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4614 /* Assume that we can memset by pieces if we can store
4615 the coefficients by pieces (in the required modes).
4616 We can't pass builtin_memset_gen_str as that emits RTL. */
4617 c = 1;
4618 if (tree_fits_uhwi_p (len)
4619 && can_store_by_pieces (tree_to_uhwi (len),
4620 builtin_memset_read_str, &c, dest_align,
4621 true))
4623 val_rtx = force_reg (val_mode, val_rtx);
4624 store_by_pieces (dest_mem, tree_to_uhwi (len),
4625 builtin_memset_gen_str, val_rtx, dest_align,
4626 true, RETURN_BEGIN);
4628 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4629 dest_align, expected_align,
4630 expected_size, min_size, max_size,
4631 probable_max_size))
4632 goto do_libcall;
4634 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4635 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4636 return dest_mem;
4639 if (target_char_cast (val, &c))
4640 goto do_libcall;
4642 if (c)
4644 if (tree_fits_uhwi_p (len)
4645 && can_store_by_pieces (tree_to_uhwi (len),
4646 builtin_memset_read_str, &c, dest_align,
4647 true))
4648 store_by_pieces (dest_mem, tree_to_uhwi (len),
4649 builtin_memset_read_str, &c, dest_align, true,
4650 RETURN_BEGIN);
4651 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4652 gen_int_mode (c, val_mode),
4653 dest_align, expected_align,
4654 expected_size, min_size, max_size,
4655 probable_max_size))
4656 goto do_libcall;
4658 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4659 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4660 return dest_mem;
4663 set_mem_align (dest_mem, dest_align);
4664 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4665 CALL_EXPR_TAILCALL (orig_exp)
4666 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4667 expected_align, expected_size,
4668 min_size, max_size,
4669 probable_max_size);
4671 if (dest_addr == 0)
4673 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4674 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4677 return dest_addr;
4679 do_libcall:
4680 fndecl = get_callee_fndecl (orig_exp);
4681 fcode = DECL_FUNCTION_CODE (fndecl);
4682 if (fcode == BUILT_IN_MEMSET)
4683 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4684 dest, val, len);
4685 else if (fcode == BUILT_IN_BZERO)
4686 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4687 dest, len);
4688 else
4689 gcc_unreachable ();
4690 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4691 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4692 return expand_call (fn, target, target == const0_rtx);
4695 /* Expand expression EXP, which is a call to the bzero builtin. Return
4696 NULL_RTX if we failed; the caller should emit a normal call. */
4698 static rtx
4699 expand_builtin_bzero (tree exp)
4701 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4702 return NULL_RTX;
4704 tree dest = CALL_EXPR_ARG (exp, 0);
4705 tree size = CALL_EXPR_ARG (exp, 1);
4707 check_memop_access (exp, dest, NULL_TREE, size);
4709 /* New argument list transforming bzero(ptr x, int y) to
4710 memset(ptr x, int 0, size_t y). This is done this way
4711 so that if it isn't expanded inline, we fall back to
4712 calling bzero instead of memset. */
4714 location_t loc = EXPR_LOCATION (exp);
4716 return expand_builtin_memset_args (dest, integer_zero_node,
4717 fold_convert_loc (loc,
4718 size_type_node, size),
4719 const0_rtx, VOIDmode, exp);
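/* Illustration (editorial sketch): the argument rewrite above maps

     bzero (p, n)   to   memset (p, 0, (size_t) n)

   for inline expansion only; ORIG_EXP still names bzero, so if the
   expansion punts to a library call it calls bzero, not memset.  */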
4722 /* Try to expand cmpstr operation ICODE with the given operands.
4723 Return the result rtx on success, otherwise return null. */
4725 static rtx
4726 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4727 HOST_WIDE_INT align)
4729 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4731 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4732 target = NULL_RTX;
4734 class expand_operand ops[4];
4735 create_output_operand (&ops[0], target, insn_mode);
4736 create_fixed_operand (&ops[1], arg1_rtx);
4737 create_fixed_operand (&ops[2], arg2_rtx);
4738 create_integer_operand (&ops[3], align);
4739 if (maybe_expand_insn (icode, 4, ops))
4740 return ops[0].value;
4741 return NULL_RTX;
4744 /* Expand expression EXP, which is a call to the memcmp built-in function.
4745 Return NULL_RTX if we failed and the caller should emit a normal call,
4746 otherwise try to get the result in TARGET, if convenient.
4747 RESULT_EQ is true if we can relax the returned value to be either zero
4748 or nonzero, without caring about the sign. */
4750 static rtx
4751 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4753 if (!validate_arglist (exp,
4754 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4755 return NULL_RTX;
4757 tree arg1 = CALL_EXPR_ARG (exp, 0);
4758 tree arg2 = CALL_EXPR_ARG (exp, 1);
4759 tree len = CALL_EXPR_ARG (exp, 2);
4760 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4761 bool no_overflow = true;
4763 /* Diagnose calls where the specified length exceeds the size of either
4764 object. */
4765 tree size = compute_objsize (arg1, 0);
4766 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4767 len, /*maxread=*/NULL_TREE, size,
4768 /*objsize=*/NULL_TREE);
4769 if (no_overflow)
4771 size = compute_objsize (arg2, 0);
4772 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4773 len, /*maxread=*/NULL_TREE, size,
4774 /*objsize=*/NULL_TREE);
4777 /* If the specified length exceeds the size of either object,
4778 call the function. */
4779 if (!no_overflow)
4780 return NULL_RTX;
4782 /* Due to the performance benefit, always inline the calls first
4783 when result_eq is false. */
4784 rtx result = NULL_RTX;
4786 if (!result_eq && fcode != BUILT_IN_BCMP)
4788 result = inline_expand_builtin_string_cmp (exp, target);
4789 if (result)
4790 return result;
4793 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4794 location_t loc = EXPR_LOCATION (exp);
4796 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4797 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4799 /* If we don't have POINTER_TYPE, call the function. */
4800 if (arg1_align == 0 || arg2_align == 0)
4801 return NULL_RTX;
4803 rtx arg1_rtx = get_memory_rtx (arg1, len);
4804 rtx arg2_rtx = get_memory_rtx (arg2, len);
4805 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4807 /* Set MEM_SIZE as appropriate. */
4808 if (CONST_INT_P (len_rtx))
4810 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4811 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4814 by_pieces_constfn constfn = NULL;
4816 const char *src_str = c_getstr (arg2);
4817 if (result_eq && src_str == NULL)
4819 src_str = c_getstr (arg1);
4820 if (src_str != NULL)
4821 std::swap (arg1_rtx, arg2_rtx);
4824 /* If SRC is a string constant and block move would be done
4825 by pieces, we can avoid loading the string from memory
4826 and only store the computed constants. */
4827 if (src_str
4828 && CONST_INT_P (len_rtx)
4829 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4830 constfn = builtin_memcpy_read_str;
4832 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4833 TREE_TYPE (len), target,
4834 result_eq, constfn,
4835 CONST_CAST (char *, src_str));
4837 if (result)
4839 /* Return the value in the proper mode for this function. */
4840 if (GET_MODE (result) == mode)
4841 return result;
4843 if (target != 0)
4845 convert_move (target, result, 0);
4846 return target;
4849 return convert_to_mode (mode, result, 0);
4852 return NULL_RTX;
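/* Illustration (editorial sketch): RESULT_EQ relaxes the contract
   from the ordered memcmp result to a zero/nonzero test, as for

     bcmp (a, b, 16)

   where any nonzero value means "different"; that freedom admits
   cheaper block-comparison sequences than the signed three-way
   form.  */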
4855 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4856 if we failed; the caller should emit a normal call, otherwise try to get
4857 the result in TARGET, if convenient. */
4859 static rtx
4860 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4862 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4863 return NULL_RTX;
4865 /* Due to the performance benefit, always inline the calls first. */
4866 rtx result = NULL_RTX;
4867 result = inline_expand_builtin_string_cmp (exp, target);
4868 if (result)
4869 return result;
4871 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4872 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4873 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4874 return NULL_RTX;
4876 tree arg1 = CALL_EXPR_ARG (exp, 0);
4877 tree arg2 = CALL_EXPR_ARG (exp, 1);
4879 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4880 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4882 /* If we don't have POINTER_TYPE, call the function. */
4883 if (arg1_align == 0 || arg2_align == 0)
4884 return NULL_RTX;
4886 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4887 arg1 = builtin_save_expr (arg1);
4888 arg2 = builtin_save_expr (arg2);
4890 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4891 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4893 /* Try to call cmpstrsi. */
4894 if (cmpstr_icode != CODE_FOR_nothing)
4895 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4896 MIN (arg1_align, arg2_align));
4898 /* Try to determine at least one length and call cmpstrnsi. */
4899 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4901 tree len;
4902 rtx arg3_rtx;
4904 tree len1 = c_strlen (arg1, 1);
4905 tree len2 = c_strlen (arg2, 1);
4907 if (len1)
4908 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4909 if (len2)
4910 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4912 /* If we don't have a constant length for the first, use the length
4913 of the second, if we know it. We don't require a constant for
4914 this case; some cost analysis could be done if both are available
4915 but neither is constant. For now, assume they're equally cheap,
4916 unless one has side effects. If both strings have constant lengths,
4917 use the smaller. */
4919 if (!len1)
4920 len = len2;
4921 else if (!len2)
4922 len = len1;
4923 else if (TREE_SIDE_EFFECTS (len1))
4924 len = len2;
4925 else if (TREE_SIDE_EFFECTS (len2))
4926 len = len1;
4927 else if (TREE_CODE (len1) != INTEGER_CST)
4928 len = len2;
4929 else if (TREE_CODE (len2) != INTEGER_CST)
4930 len = len1;
4931 else if (tree_int_cst_lt (len1, len2))
4932 len = len1;
4933 else
4934 len = len2;
4936 /* If both arguments have side effects, we cannot optimize. */
4937 if (len && !TREE_SIDE_EFFECTS (len))
4939 arg3_rtx = expand_normal (len);
4940 result = expand_cmpstrn_or_cmpmem
4941 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4942 arg3_rtx, MIN (arg1_align, arg2_align));
4946 tree fndecl = get_callee_fndecl (exp);
4947 if (result)
4949 /* Check to see if the argument was declared attribute nonstring
4950 and if so, issue a warning since at this point it's not known
4951 to be nul-terminated. */
4952 maybe_warn_nonstring_arg (fndecl, exp);
4954 /* Return the value in the proper mode for this function. */
4955 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4956 if (GET_MODE (result) == mode)
4957 return result;
4958 if (target == 0)
4959 return convert_to_mode (mode, result, 0);
4960 convert_move (target, result, 0);
4961 return target;
4964 /* Expand the library call ourselves using a stabilized argument
4965 list to avoid re-evaluating the function's arguments twice. */
4966 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4967 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4968 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4969 return expand_call (fn, target, target == const0_rtx);
4972 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4973 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4974 the result in TARGET, if convenient. */
4976 static rtx
4977 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4978 ATTRIBUTE_UNUSED machine_mode mode)
4980 if (!validate_arglist (exp,
4981 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4982 return NULL_RTX;
4984 /* Due to the performance benefit, always inline the calls first. */
4985 rtx result = NULL_RTX;
4986 result = inline_expand_builtin_string_cmp (exp, target);
4987 if (result)
4988 return result;
4990 /* If c_strlen can determine an expression for one of the string
4991 lengths, and it doesn't have side effects, then emit cmpstrnsi
4992 using length MIN(strlen(string)+1, arg3). */
4993 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4994 if (cmpstrn_icode == CODE_FOR_nothing)
4995 return NULL_RTX;
4997 tree len;
4999 tree arg1 = CALL_EXPR_ARG (exp, 0);
5000 tree arg2 = CALL_EXPR_ARG (exp, 1);
5001 tree arg3 = CALL_EXPR_ARG (exp, 2);
5003 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5004 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5006 tree len1 = c_strlen (arg1, 1);
5007 tree len2 = c_strlen (arg2, 1);
5009 location_t loc = EXPR_LOCATION (exp);
5011 if (len1)
5012 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5013 if (len2)
5014 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5016 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5018 /* If we don't have a constant length for the first, use the length
5019 of the second, if we know it. If neither string is constant length,
5020 use the given length argument. We don't require a constant for
5021 this case; some cost analysis could be done if both are available
5022 but neither is constant. For now, assume they're equally cheap,
5023 unless one has side effects. If both strings have constant lengths,
5024 use the smaller. */
5026 if (!len1 && !len2)
5027 len = len3;
5028 else if (!len1)
5029 len = len2;
5030 else if (!len2)
5031 len = len1;
5032 else if (TREE_SIDE_EFFECTS (len1))
5033 len = len2;
5034 else if (TREE_SIDE_EFFECTS (len2))
5035 len = len1;
5036 else if (TREE_CODE (len1) != INTEGER_CST)
5037 len = len2;
5038 else if (TREE_CODE (len2) != INTEGER_CST)
5039 len = len1;
5040 else if (tree_int_cst_lt (len1, len2))
5041 len = len1;
5042 else
5043 len = len2;
5045 /* If we are not using the given length, we must incorporate it here.
5046 The actual new length parameter will be MIN(len,arg3) in this case. */
5047 if (len != len3)
5049 len = fold_convert_loc (loc, sizetype, len);
5050 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5052 rtx arg1_rtx = get_memory_rtx (arg1, len);
5053 rtx arg2_rtx = get_memory_rtx (arg2, len);
5054 rtx arg3_rtx = expand_normal (len);
5055 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5056 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5057 MIN (arg1_align, arg2_align));
5059 tree fndecl = get_callee_fndecl (exp);
5060 if (result)
5062 /* Check to see if the argument was declared attribute nonstring
5063 and if so, issue a warning since at this point it's not known
5064 to be nul-terminated. */
5065 maybe_warn_nonstring_arg (fndecl, exp);
5067 /* Return the value in the proper mode for this function. */
5068 mode = TYPE_MODE (TREE_TYPE (exp));
5069 if (GET_MODE (result) == mode)
5070 return result;
5071 if (target == 0)
5072 return convert_to_mode (mode, result, 0);
5073 convert_move (target, result, 0);
5074 return target;
5077 /* Expand the library call ourselves using a stabilized argument
5078 list to avoid re-evaluating the function's arguments twice. */
5079 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5080 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5081 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5082 return expand_call (fn, target, target == const0_rtx);
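/* Illustration (editorial sketch): per the length selection above,

     strncmp (s, "abc", 100)

   compares at most MIN (strlen ("abc") + 1, 100) = 4 bytes, since
   bytes after the first NUL in either string cannot change the
   result.  */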
5085 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5086 if that's convenient. */
5088 rtx
5089 expand_builtin_saveregs (void)
5091 rtx val;
5092 rtx_insn *seq;
5094 /* Don't do __builtin_saveregs more than once in a function.
5095 Save the result of the first call and reuse it. */
5096 if (saveregs_value != 0)
5097 return saveregs_value;
5099 /* When this function is called, it means that registers must be
5100 saved on entry to this function. So we migrate the call to the
5101 first insn of this function. */
5103 start_sequence ();
5105 /* Do whatever the machine needs done in this case. */
5106 val = targetm.calls.expand_builtin_saveregs ();
5108 seq = get_insns ();
5109 end_sequence ();
5111 saveregs_value = val;
5113 /* Put the insns after the NOTE that starts the function. If this
5114 is inside a start_sequence, make the outer-level insn chain current, so
5115 the code is placed at the start of the function. */
5116 push_topmost_sequence ();
5117 emit_insn_after (seq, entry_of_function ());
5118 pop_topmost_sequence ();
5120 return val;
5123 /* Expand a call to __builtin_next_arg. */
5125 static rtx
5126 expand_builtin_next_arg (void)
5128 /* Checking arguments is already done in fold_builtin_next_arg
5129 that must be called before this function. */
5130 return expand_binop (ptr_mode, add_optab,
5131 crtl->args.internal_arg_pointer,
5132 crtl->args.arg_offset_rtx,
5133 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5136 /* Make it easier for the backends by protecting the valist argument
5137 from multiple evaluations. */
5139 static tree
5140 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5142 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5144 /* The current way of determining the type of valist is completely
5145 bogus. We should have the information on the va builtin instead. */
5146 if (!vatype)
5147 vatype = targetm.fn_abi_va_list (cfun->decl);
5149 if (TREE_CODE (vatype) == ARRAY_TYPE)
5151 if (TREE_SIDE_EFFECTS (valist))
5152 valist = save_expr (valist);
5154 /* For this case, the backends will be expecting a pointer to
5155 vatype, but it's possible we've actually been given an array
5156 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5157 So fix it. */
5158 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5160 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5161 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5164 else
5166 tree pt = build_pointer_type (vatype);
5168 if (! needs_lvalue)
5170 if (! TREE_SIDE_EFFECTS (valist))
5171 return valist;
5173 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5174 TREE_SIDE_EFFECTS (valist) = 1;
5177 if (TREE_SIDE_EFFECTS (valist))
5178 valist = save_expr (valist);
5179 valist = fold_build2_loc (loc, MEM_REF,
5180 vatype, valist, build_int_cst (pt, 0));
5183 return valist;
5186 /* The "standard" definition of va_list is void*. */
5188 tree
5189 std_build_builtin_va_list (void)
5191 return ptr_type_node;
5194 /* The "standard" abi va_list is va_list_type_node. */
5196 tree
5197 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5199 return va_list_type_node;
5202 /* The "standard" type of va_list is va_list_type_node. */
5204 tree
5205 std_canonical_va_list_type (tree type)
5207 tree wtype, htype;
5209 wtype = va_list_type_node;
5210 htype = type;
5212 if (TREE_CODE (wtype) == ARRAY_TYPE)
5214 /* If va_list is an array type, the argument may have decayed
5215 to a pointer type, e.g. by being passed to another function.
5216 In that case, unwrap both types so that we can compare the
5217 underlying records. */
5218 if (TREE_CODE (htype) == ARRAY_TYPE
5219 || POINTER_TYPE_P (htype))
5221 wtype = TREE_TYPE (wtype);
5222 htype = TREE_TYPE (htype);
5225 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5226 return va_list_type_node;
5228 return NULL_TREE;
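/* Illustration (editorial sketch): on targets where va_list is an
   array type (x86-64 uses a one-element array of __va_list_tag), a
   parameter declared as

     void f (va_list ap);

   decays to a pointer, which is why both WTYPE and HTYPE are
   unwrapped above before their main variants are compared.  */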
5231 /* The "standard" implementation of va_start: just assign `nextarg' to
5232 the variable. */
5234 void
5235 std_expand_builtin_va_start (tree valist, rtx nextarg)
5237 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5238 convert_move (va_r, nextarg, 0);
5241 /* Expand EXP, a call to __builtin_va_start. */
5243 static rtx
5244 expand_builtin_va_start (tree exp)
5246 rtx nextarg;
5247 tree valist;
5248 location_t loc = EXPR_LOCATION (exp);
5250 if (call_expr_nargs (exp) < 2)
5252 error_at (loc, "too few arguments to function %<va_start%>");
5253 return const0_rtx;
5256 if (fold_builtin_next_arg (exp, true))
5257 return const0_rtx;
5259 nextarg = expand_builtin_next_arg ();
5260 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5262 if (targetm.expand_builtin_va_start)
5263 targetm.expand_builtin_va_start (valist, nextarg);
5264 else
5265 std_expand_builtin_va_start (valist, nextarg);
5267 return const0_rtx;
5270 /* Expand EXP, a call to __builtin_va_end. */
5272 static rtx
5273 expand_builtin_va_end (tree exp)
5275 tree valist = CALL_EXPR_ARG (exp, 0);
5277 /* Evaluate for side effects, if needed. I hate macros that don't
5278 do that. */
5279 if (TREE_SIDE_EFFECTS (valist))
5280 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5282 return const0_rtx;
5285 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5286 builtin rather than just as an assignment in stdarg.h because of the
5287 nastiness of array-type va_list types. */
5289 static rtx
5290 expand_builtin_va_copy (tree exp)
5292 tree dst, src, t;
5293 location_t loc = EXPR_LOCATION (exp);
5295 dst = CALL_EXPR_ARG (exp, 0);
5296 src = CALL_EXPR_ARG (exp, 1);
5298 dst = stabilize_va_list_loc (loc, dst, 1);
5299 src = stabilize_va_list_loc (loc, src, 0);
5301 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5303 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5305 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5306 TREE_SIDE_EFFECTS (t) = 1;
5307 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5309 else
5311 rtx dstb, srcb, size;
5313 /* Evaluate to pointers. */
5314 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5315 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5316 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5317 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5319 dstb = convert_memory_address (Pmode, dstb);
5320 srcb = convert_memory_address (Pmode, srcb);
5322 /* "Dereference" to BLKmode memories. */
5323 dstb = gen_rtx_MEM (BLKmode, dstb);
5324 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5325 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5326 srcb = gen_rtx_MEM (BLKmode, srcb);
5327 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5328 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5330 /* Copy. */
5331 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5334 return const0_rtx;
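/* Illustration of why va_copy must be a builtin (a sketch; the array
   branch above is what emits the block copy):

     va_list a, b;
     b = a;                     // invalid C when va_list is an array type
     __builtin_va_copy (b, a);  // always works: block copy of the ABI type
*/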
5337 /* Expand a call to one of the builtin functions __builtin_frame_address or
5338 __builtin_return_address. */
5340 static rtx
5341 expand_builtin_frame_address (tree fndecl, tree exp)
5343 /* The argument must be a nonnegative integer constant.
5344 It counts the number of frames to scan up the stack.
5345 The value is either the frame pointer value or the return
5346 address saved in that frame. */
5347 if (call_expr_nargs (exp) == 0)
5348 /* Warning about missing arg was already issued. */
5349 return const0_rtx;
5350 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5352 error ("invalid argument to %qD", fndecl);
5353 return const0_rtx;
5355 else
5357 /* Number of frames to scan up the stack. */
5358 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5360 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5362 /* Some ports cannot access arbitrary stack frames. */
5363 if (tem == NULL)
5365 warning (0, "unsupported argument to %qD", fndecl);
5366 return const0_rtx;
5369 if (count)
5371 /* Warn since no effort is made to ensure that any frame
5372 beyond the current one exists or can be safely reached. */
5373 warning (OPT_Wframe_address, "calling %qD with "
5374 "a nonzero argument is unsafe", fndecl);
5377 /* For __builtin_frame_address, return what we've got. */
5378 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5379 return tem;
5381 if (!REG_P (tem)
5382 && ! CONSTANT_P (tem))
5383 tem = copy_addr_to_reg (tem);
5384 return tem;
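/* Illustrative usage of the two builtins expanded above (a sketch):

     void *fp = __builtin_frame_address (0);    // current frame, safe
     void *ra = __builtin_return_address (0);   // current return address
     void *up = __builtin_frame_address (1);    // warns under -Wframe-address
*/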
5388 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5389 failed and the caller should emit a normal call. */
5391 static rtx
5392 expand_builtin_alloca (tree exp)
5394 rtx op0;
5395 rtx result;
5396 unsigned int align;
5397 tree fndecl = get_callee_fndecl (exp);
5398 HOST_WIDE_INT max_size;
5399 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5400 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5401 bool valid_arglist
5402 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5403 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5404 VOID_TYPE)
5405 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5406 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5407 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5409 if (!valid_arglist)
5410 return NULL_RTX;
5412 if ((alloca_for_var
5413 && warn_vla_limit >= HOST_WIDE_INT_MAX
5414 && warn_alloc_size_limit < warn_vla_limit)
5415 || (!alloca_for_var
5416 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5417 && warn_alloc_size_limit < warn_alloca_limit
5420 /* -Walloca-larger-than and -Wvla-larger-than settings of
5421 less than HOST_WIDE_INT_MAX override the more general
5422 -Walloc-size-larger-than, so unless either of the former
5423 options is smaller than the latter (which would imply
5424 that the call was already checked), check the alloca
5425 arguments for overflow. */
5426 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5427 int idx[] = { 0, -1 };
5428 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5431 /* Compute the argument. */
5432 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5434 /* Compute the alignment. */
5435 align = (fcode == BUILT_IN_ALLOCA
5436 ? BIGGEST_ALIGNMENT
5437 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5439 /* Compute the maximum size. */
5440 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5441 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5442 : -1);
5444 /* Allocate the desired space. If the allocation stems from the declaration
5445 of a variable-sized object, it cannot accumulate. */
5446 result
5447 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5448 result = convert_memory_address (ptr_mode, result);
5450 return result;
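/* The three entry points expanded above, in illustrative source form
   (the alignment argument of the _with_align forms is given in bits):

     p = __builtin_alloca (n);                            // BIGGEST_ALIGNMENT
     p = __builtin_alloca_with_align (n, 64);             // align to 64 bits
     p = __builtin_alloca_with_align_and_max (n, 64, 4096);  // bounded size
*/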
5453 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5454 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5455 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5456 handle_builtin_stack_restore function. */
5458 static rtx
5459 expand_asan_emit_allocas_unpoison (tree exp)
5461 tree arg0 = CALL_EXPR_ARG (exp, 0);
5462 tree arg1 = CALL_EXPR_ARG (exp, 1);
5463 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5464 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5465 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5466 stack_pointer_rtx, NULL_RTX, 0,
5467 OPTAB_LIB_WIDEN);
5468 off = convert_modes (ptr_mode, Pmode, off, 0);
5469 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5470 OPTAB_LIB_WIDEN);
5471 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5472 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5473 top, ptr_mode, bot, ptr_mode);
5474 return ret;
5477 /* Expand a call to bswap builtin in EXP.
5478 Return NULL_RTX if a normal call should be emitted rather than expanding the
5479 function in-line. If convenient, the result should be placed in TARGET.
5480 SUBTARGET may be used as the target for computing one of EXP's operands. */
5482 static rtx
5483 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5484 rtx subtarget)
5486 tree arg;
5487 rtx op0;
5489 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5490 return NULL_RTX;
5492 arg = CALL_EXPR_ARG (exp, 0);
5493 op0 = expand_expr (arg,
5494 subtarget && GET_MODE (subtarget) == target_mode
5495 ? subtarget : NULL_RTX,
5496 target_mode, EXPAND_NORMAL);
5497 if (GET_MODE (op0) != target_mode)
5498 op0 = convert_to_mode (target_mode, op0, 1);
5500 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5502 gcc_assert (target);
5504 return convert_to_mode (target_mode, target, 1);
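/* Illustration of the byte-swap expansion above:

     __builtin_bswap16 (0x1122)     == 0x2211
     __builtin_bswap32 (0x11223344) == 0x44332211
*/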
5507 /* Expand a call to a unary builtin in EXP.
5508 Return NULL_RTX if a normal call should be emitted rather than expanding the
5509 function in-line. If convenient, the result should be placed in TARGET.
5510 SUBTARGET may be used as the target for computing one of EXP's operands. */
5512 static rtx
5513 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5514 rtx subtarget, optab op_optab)
5516 rtx op0;
5518 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5519 return NULL_RTX;
5521 /* Compute the argument. */
5522 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5523 (subtarget
5524 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5525 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5526 VOIDmode, EXPAND_NORMAL);
5527 /* Compute op, into TARGET if possible.
5528 Set TARGET to wherever the result comes back. */
5529 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5530 op_optab, op0, target, op_optab != clrsb_optab);
5531 gcc_assert (target);
5533 return convert_to_mode (target_mode, target, 0);
5536 /* Expand a call to __builtin_expect. We just return our argument
5537 as the builtin_expect semantics should have already been applied by
5538 the tree branch prediction pass. */
5540 static rtx
5541 expand_builtin_expect (tree exp, rtx target)
5543 tree arg;
5545 if (call_expr_nargs (exp) < 2)
5546 return const0_rtx;
5547 arg = CALL_EXPR_ARG (exp, 0);
5549 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5550 /* When guessing was done, the hints should be already stripped away. */
5551 gcc_assert (!flag_guess_branch_prob
5552 || optimize == 0 || seen_error ());
5553 return target;
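/* Illustrative source form (a sketch): by this point the hint has been
   consumed by branch prediction, so the builtin acts as an identity:

     if (__builtin_expect (ptr != 0, 1))   // expands to: if (ptr != 0)
       use (ptr);                          // use() is a hypothetical callee
*/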
5556 /* Expand a call to __builtin_expect_with_probability. We just return our
5557 argument as the builtin_expect semantics should have already been applied
5558 by the tree branch prediction pass. */
5560 static rtx
5561 expand_builtin_expect_with_probability (tree exp, rtx target)
5563 tree arg;
5565 if (call_expr_nargs (exp) < 3)
5566 return const0_rtx;
5567 arg = CALL_EXPR_ARG (exp, 0);
5569 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5570 /* When guessing was done, the hints should be already stripped away. */
5571 gcc_assert (!flag_guess_branch_prob
5572 || optimize == 0 || seen_error ());
5573 return target;
5577 /* Expand a call to __builtin_assume_aligned. We just return our first
5578 argument as the builtin_assume_aligned semantics should have already
5579 been applied by CCP. */
5581 static rtx
5582 expand_builtin_assume_aligned (tree exp, rtx target)
5584 if (call_expr_nargs (exp) < 2)
5585 return const0_rtx;
5586 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5587 EXPAND_NORMAL);
5588 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5589 && (call_expr_nargs (exp) < 3
5590 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5591 return target;
5594 void
5595 expand_builtin_trap (void)
5597 if (targetm.have_trap ())
5599 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5600 /* For trap insns when not accumulating outgoing args force
5601 REG_ARGS_SIZE note to prevent crossjumping of calls with
5602 different args sizes. */
5603 if (!ACCUMULATE_OUTGOING_ARGS)
5604 add_args_size_note (insn, stack_pointer_delta);
5606 else
5608 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5609 tree call_expr = build_call_expr (fn, 0);
5610 expand_call (call_expr, NULL_RTX, false);
5613 emit_barrier ();
5616 /* Expand a call to __builtin_unreachable. We do nothing except emit
5617 a barrier saying that control flow will not pass here.
5619 It is the responsibility of the program being compiled to ensure
5620 that control flow never reaches __builtin_unreachable. */
5621 static void
5622 expand_builtin_unreachable (void)
5624 emit_barrier ();
5627 /* Expand EXP, a call to fabs, fabsf or fabsl.
5628 Return NULL_RTX if a normal call should be emitted rather than expanding
5629 the function inline. If convenient, the result should be placed
5630 in TARGET. SUBTARGET may be used as the target for computing
5631 the operand. */
5633 static rtx
5634 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5636 machine_mode mode;
5637 tree arg;
5638 rtx op0;
5640 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5641 return NULL_RTX;
5643 arg = CALL_EXPR_ARG (exp, 0);
5644 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5645 mode = TYPE_MODE (TREE_TYPE (arg));
5646 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5647 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5650 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5651 Return NULL if a normal call should be emitted rather than expanding the
5652 function inline. If convenient, the result should be placed in TARGET.
5653 SUBTARGET may be used as the target for computing the operand. */
5655 static rtx
5656 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5658 rtx op0, op1;
5659 tree arg;
5661 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5662 return NULL_RTX;
5664 arg = CALL_EXPR_ARG (exp, 0);
5665 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5667 arg = CALL_EXPR_ARG (exp, 1);
5668 op1 = expand_normal (arg);
5670 return expand_copysign (op0, op1, target);
5673 /* Expand a call to __builtin___clear_cache. */
5675 static rtx
5676 expand_builtin___clear_cache (tree exp)
5678 if (!targetm.code_for_clear_cache)
5680 #ifdef CLEAR_INSN_CACHE
5681 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5682 does something. Just do the default expansion to a call to
5683 __clear_cache(). */
5684 return NULL_RTX;
5685 #else
5686 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5687 does nothing. There is no need to call it. Do nothing. */
5688 return const0_rtx;
5689 #endif /* CLEAR_INSN_CACHE */
5692 /* We have a "clear_cache" insn, and it will handle everything. */
5693 tree begin, end;
5694 rtx begin_rtx, end_rtx;
5696 /* We must not expand to a library call. If we did, any
5697 fallback library function in libgcc that might contain a call to
5698 __builtin___clear_cache() would recurse infinitely. */
5699 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5701 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5702 return const0_rtx;
5705 if (targetm.have_clear_cache ())
5707 class expand_operand ops[2];
5709 begin = CALL_EXPR_ARG (exp, 0);
5710 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5712 end = CALL_EXPR_ARG (exp, 1);
5713 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5715 create_address_operand (&ops[0], begin_rtx);
5716 create_address_operand (&ops[1], end_rtx);
5717 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5718 return const0_rtx;
5720 return const0_rtx;
5723 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5725 static rtx
5726 round_trampoline_addr (rtx tramp)
5728 rtx temp, addend, mask;
5730 /* If we don't need too much alignment, we'll have been guaranteed
5731 proper alignment by get_trampoline_type. */
5732 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5733 return tramp;
5735 /* Round address up to desired boundary. */
5736 temp = gen_reg_rtx (Pmode);
5737 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5738 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5740 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5741 temp, 0, OPTAB_LIB_WIDEN);
5742 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5743 temp, 0, OPTAB_LIB_WIDEN);
5745 return tramp;
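/* Worked example of the round-up above, with a hypothetical
   TRAMPOLINE_ALIGNMENT of 128 bits: ADDEND is 15 and MASK is -16, so

     tramp = (tramp + 15) & -16;   // e.g. 0x1003 -> 0x1010
*/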
5748 static rtx
5749 expand_builtin_init_trampoline (tree exp, bool onstack)
5751 tree t_tramp, t_func, t_chain;
5752 rtx m_tramp, r_tramp, r_chain, tmp;
5754 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5755 POINTER_TYPE, VOID_TYPE))
5756 return NULL_RTX;
5758 t_tramp = CALL_EXPR_ARG (exp, 0);
5759 t_func = CALL_EXPR_ARG (exp, 1);
5760 t_chain = CALL_EXPR_ARG (exp, 2);
5762 r_tramp = expand_normal (t_tramp);
5763 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5764 MEM_NOTRAP_P (m_tramp) = 1;
5766 /* If ONSTACK, the TRAMP argument should be the address of a field
5767 within the local function's FRAME decl. Either way, let's see if
5768 we can fill in the MEM_ATTRs for this memory. */
5769 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5770 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5772 /* Creator of a heap trampoline is responsible for making sure the
5773 address is aligned to at least STACK_BOUNDARY. Normally malloc
5774 will ensure this anyhow. */
5775 tmp = round_trampoline_addr (r_tramp);
5776 if (tmp != r_tramp)
5778 m_tramp = change_address (m_tramp, BLKmode, tmp);
5779 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5780 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5783 /* The FUNC argument should be the address of the nested function.
5784 Extract the actual function decl to pass to the hook. */
5785 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5786 t_func = TREE_OPERAND (t_func, 0);
5787 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5789 r_chain = expand_normal (t_chain);
5791 /* Generate insns to initialize the trampoline. */
5792 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5794 if (onstack)
5796 trampolines_created = 1;
5798 if (targetm.calls.custom_function_descriptors != 0)
5799 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5800 "trampoline generated for nested function %qD", t_func);
5803 return const0_rtx;
5806 static rtx
5807 expand_builtin_adjust_trampoline (tree exp)
5809 rtx tramp;
5811 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5812 return NULL_RTX;
5814 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5815 tramp = round_trampoline_addr (tramp);
5816 if (targetm.calls.trampoline_adjust_address)
5817 tramp = targetm.calls.trampoline_adjust_address (tramp);
5819 return tramp;
5822 /* Expand a call to the builtin descriptor initialization routine.
5823 A descriptor is made up of a couple of pointers to the static
5824 chain and the code entry in this order. */
5826 static rtx
5827 expand_builtin_init_descriptor (tree exp)
5829 tree t_descr, t_func, t_chain;
5830 rtx m_descr, r_descr, r_func, r_chain;
5832 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5833 VOID_TYPE))
5834 return NULL_RTX;
5836 t_descr = CALL_EXPR_ARG (exp, 0);
5837 t_func = CALL_EXPR_ARG (exp, 1);
5838 t_chain = CALL_EXPR_ARG (exp, 2);
5840 r_descr = expand_normal (t_descr);
5841 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5842 MEM_NOTRAP_P (m_descr) = 1;
5843 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5845 r_func = expand_normal (t_func);
5846 r_chain = expand_normal (t_chain);
5848 /* Generate insns to initialize the descriptor. */
5849 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5850 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5851 POINTER_SIZE / BITS_PER_UNIT), r_func);
5853 return const0_rtx;
5856 /* Expand a call to the builtin descriptor adjustment routine. */
5858 static rtx
5859 expand_builtin_adjust_descriptor (tree exp)
5861 rtx tramp;
5863 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5864 return NULL_RTX;
5866 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5868 /* Unalign the descriptor to allow runtime identification. */
5869 tramp = plus_constant (ptr_mode, tramp,
5870 targetm.calls.custom_function_descriptors);
5872 return force_operand (tramp, NULL_RTX);
5875 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5876 function. The function first checks whether the back end provides
5877 an insn to implement signbit for the respective mode. If not, it
5878 checks whether the floating point format of the value is such that
5879 the sign bit can be extracted. If that is not the case, error out.
5880 EXP is the expression that is a call to the builtin function; if
5881 convenient, the result should be placed in TARGET. */
5882 static rtx
5883 expand_builtin_signbit (tree exp, rtx target)
5885 const struct real_format *fmt;
5886 scalar_float_mode fmode;
5887 scalar_int_mode rmode, imode;
5888 tree arg;
5889 int word, bitpos;
5890 enum insn_code icode;
5891 rtx temp;
5892 location_t loc = EXPR_LOCATION (exp);
5894 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5895 return NULL_RTX;
5897 arg = CALL_EXPR_ARG (exp, 0);
5898 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5899 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5900 fmt = REAL_MODE_FORMAT (fmode);
5902 arg = builtin_save_expr (arg);
5904 /* Expand the argument yielding a RTX expression. */
5905 temp = expand_normal (arg);
5907 /* Check if the back end provides an insn that handles signbit for the
5908 argument's mode. */
5909 icode = optab_handler (signbit_optab, fmode);
5910 if (icode != CODE_FOR_nothing)
5912 rtx_insn *last = get_last_insn ();
5913 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5914 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5915 return target;
5916 delete_insns_since (last);
5919 /* For floating point formats without a sign bit, implement signbit
5920 as "ARG < 0.0". */
5921 bitpos = fmt->signbit_ro;
5922 if (bitpos < 0)
5924 /* But we can't do this if the format supports signed zero. */
5925 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5927 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5928 build_real (TREE_TYPE (arg), dconst0));
5929 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5932 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5934 imode = int_mode_for_mode (fmode).require ();
5935 temp = gen_lowpart (imode, temp);
5937 else
5939 imode = word_mode;
5940 /* Handle targets with different FP word orders. */
5941 if (FLOAT_WORDS_BIG_ENDIAN)
5942 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5943 else
5944 word = bitpos / BITS_PER_WORD;
5945 temp = operand_subword_force (temp, word, fmode);
5946 bitpos = bitpos % BITS_PER_WORD;
5949 /* Force the intermediate word_mode (or narrower) result into a
5950 register. This avoids attempting to create paradoxical SUBREGs
5951 of floating point modes below. */
5952 temp = force_reg (imode, temp);
5954 /* If the bitpos is within the "result mode" lowpart, the operation
5955 can be implemented with a single bitwise AND. Otherwise, we need
5956 a right shift and an AND. */
5958 if (bitpos < GET_MODE_BITSIZE (rmode))
5960 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5962 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5963 temp = gen_lowpart (rmode, temp);
5964 temp = expand_binop (rmode, and_optab, temp,
5965 immed_wide_int_const (mask, rmode),
5966 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5968 else
5970 /* Perform a logical right shift to place the signbit in the least
5971 significant bit, then truncate the result to the desired mode
5972 and mask just this bit. */
5973 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5974 temp = gen_lowpart (rmode, temp);
5975 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5976 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5979 return temp;
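/* Worked example for the shift-and-mask path above (IEEE double with
   signbit_ro == 63 and a 32-bit result mode, so bitpos >= 32):

     result = (int) (rep >> 63) & 1;   // REP: hypothetical integer view
*/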
5982 /* Expand fork or exec calls. TARGET is the desired target of the
5983 call. EXP is the call. FN is the
5984 identifier of the actual function. IGNORE is nonzero if the
5985 value is to be ignored. */
5987 static rtx
5988 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5990 tree id, decl;
5991 tree call;
5993 /* If we are not profiling, just call the function. */
5994 if (!profile_arc_flag)
5995 return NULL_RTX;
5997 /* Otherwise call the wrapper. This should be equivalent for the rest of
5998 the compiler, so the code does not diverge, and the wrapper may run the
5999 code necessary for keeping the profiling sane. */
6001 switch (DECL_FUNCTION_CODE (fn))
6003 case BUILT_IN_FORK:
6004 id = get_identifier ("__gcov_fork");
6005 break;
6007 case BUILT_IN_EXECL:
6008 id = get_identifier ("__gcov_execl");
6009 break;
6011 case BUILT_IN_EXECV:
6012 id = get_identifier ("__gcov_execv");
6013 break;
6015 case BUILT_IN_EXECLP:
6016 id = get_identifier ("__gcov_execlp");
6017 break;
6019 case BUILT_IN_EXECLE:
6020 id = get_identifier ("__gcov_execle");
6021 break;
6023 case BUILT_IN_EXECVP:
6024 id = get_identifier ("__gcov_execvp");
6025 break;
6027 case BUILT_IN_EXECVE:
6028 id = get_identifier ("__gcov_execve");
6029 break;
6031 default:
6032 gcc_unreachable ();
6035 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6036 FUNCTION_DECL, id, TREE_TYPE (fn));
6037 DECL_EXTERNAL (decl) = 1;
6038 TREE_PUBLIC (decl) = 1;
6039 DECL_ARTIFICIAL (decl) = 1;
6040 TREE_NOTHROW (decl) = 1;
6041 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6042 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6043 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6044 return expand_call (call, target, ignore);
6049 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6050 the pointer in these functions is void*, the tree optimizers may remove
6051 casts. The mode computed in expand_builtin isn't reliable either, due
6052 to __sync_bool_compare_and_swap.
6054 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6055 group of builtins. This gives us log2 of the mode size. */
6057 static inline machine_mode
6058 get_builtin_sync_mode (int fcode_diff)
6060 /* The size is not negotiable, so ask not to get BLKmode in return
6061 if the target indicates that a smaller size would be better. */
6062 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
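/* Illustration: FCODE_DIFF is log2 of the access size in bytes, so e.g.
   the _1 builtins (diff 0) get QImode, _4 (diff 2) gets
   int_mode_for_size (8 << 2) == SImode, and _8 (diff 3) gets DImode.  */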
6065 /* Expand the memory expression LOC and return the appropriate memory operand
6066 for the builtin_sync operations. */
6068 static rtx
6069 get_builtin_sync_mem (tree loc, machine_mode mode)
6071 rtx addr, mem;
6072 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6073 ? TREE_TYPE (TREE_TYPE (loc))
6074 : TREE_TYPE (loc));
6075 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6077 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6078 addr = convert_memory_address (addr_mode, addr);
6080 /* Note that we explicitly do not want any alias information for this
6081 memory, so that we kill all other live memories. Otherwise we don't
6082 satisfy the full barrier semantics of the intrinsic. */
6083 mem = gen_rtx_MEM (mode, addr);
6085 set_mem_addr_space (mem, addr_space);
6087 mem = validize_mem (mem);
6089 /* The alignment needs to be at least that of the mode. */
6090 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6091 get_pointer_alignment (loc)));
6092 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6093 MEM_VOLATILE_P (mem) = 1;
6095 return mem;
6098 /* Make sure an argument is in the right mode.
6099 EXP is the tree argument.
6100 MODE is the mode it should be in. */
6102 static rtx
6103 expand_expr_force_mode (tree exp, machine_mode mode)
6105 rtx val;
6106 machine_mode old_mode;
6108 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6109 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6110 of CONST_INTs, where we know the old_mode only from the call argument. */
6112 old_mode = GET_MODE (val);
6113 if (old_mode == VOIDmode)
6114 old_mode = TYPE_MODE (TREE_TYPE (exp));
6115 val = convert_modes (mode, old_mode, val, 1);
6116 return val;
6120 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6121 EXP is the CALL_EXPR. CODE is the rtx code
6122 that corresponds to the arithmetic or logical operation from the name;
6123 an exception here is that NOT actually means NAND. TARGET is an optional
6124 place for us to store the results; AFTER is true if this is the
6125 fetch_and_xxx form. */
6127 static rtx
6128 expand_builtin_sync_operation (machine_mode mode, tree exp,
6129 enum rtx_code code, bool after,
6130 rtx target)
6132 rtx val, mem;
6133 location_t loc = EXPR_LOCATION (exp);
6135 if (code == NOT && warn_sync_nand)
6137 tree fndecl = get_callee_fndecl (exp);
6138 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6140 static bool warned_f_a_n, warned_n_a_f;
6142 switch (fcode)
6144 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6145 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6146 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6147 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6148 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6149 if (warned_f_a_n)
6150 break;
6152 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6153 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6154 warned_f_a_n = true;
6155 break;
6157 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6158 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6159 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6160 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6161 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6162 if (warned_n_a_f)
6163 break;
6165 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6166 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6167 warned_n_a_f = true;
6168 break;
6170 default:
6171 gcc_unreachable ();
6175 /* Expand the operands. */
6176 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6177 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6179 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6180 after);
6183 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6184 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6185 true if this is the boolean form. TARGET is a place for us to store the
6186 results; this is NOT optional if IS_BOOL is true. */
6188 static rtx
6189 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6190 bool is_bool, rtx target)
6192 rtx old_val, new_val, mem;
6193 rtx *pbool, *poval;
6195 /* Expand the operands. */
6196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6197 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6198 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6200 pbool = poval = NULL;
6201 if (target != const0_rtx)
6203 if (is_bool)
6204 pbool = &target;
6205 else
6206 poval = &target;
6208 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6209 false, MEMMODEL_SYNC_SEQ_CST,
6210 MEMMODEL_SYNC_SEQ_CST))
6211 return NULL_RTX;
6213 return target;
6216 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6217 general form is actually an atomic exchange, and some targets only
6218 support a reduced form with the second argument being a constant 1.
6219 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6220 the results. */
6222 static rtx
6223 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6224 rtx target)
6226 rtx val, mem;
6228 /* Expand the operands. */
6229 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6230 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6232 return expand_sync_lock_test_and_set (target, mem, val);
6235 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6237 static void
6238 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6240 rtx mem;
6242 /* Expand the operands. */
6243 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6245 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6248 /* Given an integer representing an ``enum memmodel'', verify its
6249 correctness and return the memory model enum. */
6251 static enum memmodel
6252 get_memmodel (tree exp)
6254 rtx op;
6255 unsigned HOST_WIDE_INT val;
6256 location_t loc
6257 = expansion_point_location_if_in_system_header (input_location);
6259 /* If the parameter is not a constant, it's a run time value so we'll just
6260 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6261 if (TREE_CODE (exp) != INTEGER_CST)
6262 return MEMMODEL_SEQ_CST;
6264 op = expand_normal (exp);
6266 val = INTVAL (op);
6267 if (targetm.memmodel_check)
6268 val = targetm.memmodel_check (val);
6269 else if (val & ~MEMMODEL_MASK)
6271 warning_at (loc, OPT_Winvalid_memory_model,
6272 "unknown architecture specifier in memory model to builtin");
6273 return MEMMODEL_SEQ_CST;
6276 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
6277 if (memmodel_base (val) >= MEMMODEL_LAST)
6279 warning_at (loc, OPT_Winvalid_memory_model,
6280 "invalid memory model argument to builtin");
6281 return MEMMODEL_SEQ_CST;
6284 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6285 be conservative and promote consume to acquire. */
6286 if (val == MEMMODEL_CONSUME)
6287 val = MEMMODEL_ACQUIRE;
6289 return (enum memmodel) val;
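/* For reference, the constants being validated above follow the C11
   ordering: __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1), __ATOMIC_ACQUIRE (2),
   __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4), __ATOMIC_SEQ_CST (5);
   consume is promoted to acquire by the PR 59448 workaround above.  */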
6292 /* Expand the __atomic_exchange intrinsic:
6293 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6294 EXP is the CALL_EXPR.
6295 TARGET is an optional place for us to store the results. */
6297 static rtx
6298 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6300 rtx val, mem;
6301 enum memmodel model;
6303 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6305 if (!flag_inline_atomics)
6306 return NULL_RTX;
6308 /* Expand the operands. */
6309 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6310 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6312 return expand_atomic_exchange (target, mem, val, model);
6315 /* Expand the __atomic_compare_exchange intrinsic:
6316 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6317 TYPE desired, BOOL weak,
6318 enum memmodel success,
6319 enum memmodel failure)
6320 EXP is the CALL_EXPR.
6321 TARGET is an optional place for us to store the results. */
6323 static rtx
6324 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6325 rtx target)
6327 rtx expect, desired, mem, oldval;
6328 rtx_code_label *label;
6329 enum memmodel success, failure;
6330 tree weak;
6331 bool is_weak;
6332 location_t loc
6333 = expansion_point_location_if_in_system_header (input_location);
6335 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6336 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6338 if (failure > success)
6340 warning_at (loc, OPT_Winvalid_memory_model,
6341 "failure memory model cannot be stronger than success "
6342 "memory model for %<__atomic_compare_exchange%>");
6343 success = MEMMODEL_SEQ_CST;
6346 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6348 warning_at (loc, OPT_Winvalid_memory_model,
6349 "invalid failure memory model for "
6350 "%<__atomic_compare_exchange%>");
6351 failure = MEMMODEL_SEQ_CST;
6352 success = MEMMODEL_SEQ_CST;
6356 if (!flag_inline_atomics)
6357 return NULL_RTX;
6359 /* Expand the operands. */
6360 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6362 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6363 expect = convert_memory_address (Pmode, expect);
6364 expect = gen_rtx_MEM (mode, expect);
6365 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6367 weak = CALL_EXPR_ARG (exp, 3);
6368 is_weak = false;
6369 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6370 is_weak = true;
6372 if (target == const0_rtx)
6373 target = NULL;
6375 /* Lest the rtl backend create a race condition with an improper store
6376 to memory, always create a new pseudo for OLDVAL. */
6377 oldval = NULL;
6379 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6380 is_weak, success, failure))
6381 return NULL_RTX;
6383 /* Conditionally store back to EXPECT, lest we create a race condition
6384 with an improper store to memory. */
6385 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6386 the normal case where EXPECT is totally private, i.e. a register. At
6387 which point the store can be unconditional. */
6388 label = gen_label_rtx ();
6389 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6390 GET_MODE (target), 1, label);
6391 emit_move_insn (expect, oldval);
6392 emit_label (label);
6394 return target;
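/* Illustrative source form of the call expanded above (a sketch):

     int expected = 0;
     // stores 1 and returns true iff *p == expected;
     // otherwise copies the current *p back into expected
     __atomic_compare_exchange_n (p, &expected, 1, 0,   // weak == false
                                  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
*/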
6397 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6398 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6399 call. The weak parameter must be dropped to match the expected parameter
6400 list, and the expected argument changed from a value to a pointer to a
6401 memory slot. */
6403 static void
6404 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6406 unsigned int z;
6407 vec<tree, va_gc> *vec;
6409 vec_alloc (vec, 5);
6410 vec->quick_push (gimple_call_arg (call, 0));
6411 tree expected = gimple_call_arg (call, 1);
6412 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6413 TREE_TYPE (expected));
6414 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6415 if (expd != x)
6416 emit_move_insn (x, expd);
6417 tree v = make_tree (TREE_TYPE (expected), x);
6418 vec->quick_push (build1 (ADDR_EXPR,
6419 build_pointer_type (TREE_TYPE (expected)), v));
6420 vec->quick_push (gimple_call_arg (call, 2));
6421 /* Skip the boolean weak parameter. */
6422 for (z = 4; z < 6; z++)
6423 vec->quick_push (gimple_call_arg (call, z));
6424 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6425 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6426 gcc_assert (bytes_log2 < 5);
6427 built_in_function fncode
6428 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6429 + bytes_log2);
6430 tree fndecl = builtin_decl_explicit (fncode);
6431 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6432 fndecl);
6433 tree exp = build_call_vec (boolean_type_node, fn, vec);
6434 tree lhs = gimple_call_lhs (call);
6435 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6436 if (lhs)
6438 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6439 if (GET_MODE (boolret) != mode)
6440 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6441 x = force_reg (mode, x);
6442 write_complex_part (target, boolret, true);
6443 write_complex_part (target, x, false);
6447 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6449 void
6450 expand_ifn_atomic_compare_exchange (gcall *call)
6452 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6453 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6454 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6455 rtx expect, desired, mem, oldval, boolret;
6456 enum memmodel success, failure;
6457 tree lhs;
6458 bool is_weak;
6459 location_t loc
6460 = expansion_point_location_if_in_system_header (gimple_location (call));
6462 success = get_memmodel (gimple_call_arg (call, 4));
6463 failure = get_memmodel (gimple_call_arg (call, 5));
6465 if (failure > success)
6467 warning_at (loc, OPT_Winvalid_memory_model,
6468 "failure memory model cannot be stronger than success "
6469 "memory model for %<__atomic_compare_exchange%>");
6470 success = MEMMODEL_SEQ_CST;
6473 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6475 warning_at (loc, OPT_Winvalid_memory_model,
6476 "invalid failure memory model for "
6477 "%<__atomic_compare_exchange%>");
6478 failure = MEMMODEL_SEQ_CST;
6479 success = MEMMODEL_SEQ_CST;
6482 if (!flag_inline_atomics)
6484 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6485 return;
6488 /* Expand the operands. */
6489 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6491 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6492 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6494 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6496 boolret = NULL;
6497 oldval = NULL;
6499 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6500 is_weak, success, failure))
6502 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6503 return;
6506 lhs = gimple_call_lhs (call);
6507 if (lhs)
6509 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6510 if (GET_MODE (boolret) != mode)
6511 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6512 write_complex_part (target, boolret, true);
6513 write_complex_part (target, oldval, false);
6517 /* Expand the __atomic_load intrinsic:
6518 TYPE __atomic_load (TYPE *object, enum memmodel)
6519 EXP is the CALL_EXPR.
6520 TARGET is an optional place for us to store the results. */
6522 static rtx
6523 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6525 rtx mem;
6526 enum memmodel model;
6528 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6529 if (is_mm_release (model) || is_mm_acq_rel (model))
6531 location_t loc
6532 = expansion_point_location_if_in_system_header (input_location);
6533 warning_at (loc, OPT_Winvalid_memory_model,
6534 "invalid memory model for %<__atomic_load%>");
6535 model = MEMMODEL_SEQ_CST;
6538 if (!flag_inline_atomics)
6539 return NULL_RTX;
6541 /* Expand the operand. */
6542 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6544 return expand_atomic_load (target, mem, model);
6548 /* Expand the __atomic_store intrinsic:
6549 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6550 EXP is the CALL_EXPR. */
6553 static rtx
6554 expand_builtin_atomic_store (machine_mode mode, tree exp)
6556 rtx mem, val;
6557 enum memmodel model;
6559 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6560 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6561 || is_mm_release (model)))
6563 location_t loc
6564 = expansion_point_location_if_in_system_header (input_location);
6565 warning_at (loc, OPT_Winvalid_memory_model,
6566 "invalid memory model for %<__atomic_store%>");
6567 model = MEMMODEL_SEQ_CST;
6570 if (!flag_inline_atomics)
6571 return NULL_RTX;
6573 /* Expand the operands. */
6574 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6575 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6577 return expand_atomic_store (mem, val, model, false);
6580 /* Expand the __atomic_fetch_XXX intrinsic:
6581 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6582 EXP is the CALL_EXPR.
6583 TARGET is an optional place for us to store the results.
6584 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6585 FETCH_AFTER is true if returning the result of the operation.
6586 FETCH_AFTER is false if returning the value before the operation.
6587 IGNORE is true if the result is not used.
6588 EXT_CALL is the correct builtin for an external call if this cannot be
6589 resolved to an instruction sequence. */
6591 static rtx
6592 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6593 enum rtx_code code, bool fetch_after,
6594 bool ignore, enum built_in_function ext_call)
6596 rtx val, mem, ret;
6597 enum memmodel model;
6598 tree fndecl;
6599 tree addr;
6601 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6603 /* Expand the operands. */
6604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6605 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6607 /* Only try generating instructions if inlining is turned on. */
6608 if (flag_inline_atomics)
6610 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6611 if (ret)
6612 return ret;
6615 /* Return if a different routine isn't needed for the library call. */
6616 if (ext_call == BUILT_IN_NONE)
6617 return NULL_RTX;
6619 /* Change the call to the specified function. */
6620 fndecl = get_callee_fndecl (exp);
6621 addr = CALL_EXPR_FN (exp);
6622 STRIP_NOPS (addr);
6624 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6625 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6627 /* If we will emit code after the call, the call cannot be a tail call.
6628 If it is emitted as a tail call, a barrier is emitted after it, and
6629 then all trailing code is removed. */
6630 if (!ignore)
6631 CALL_EXPR_TAILCALL (exp) = 0;
6633 /* Expand the call here so we can emit trailing code. */
6634 ret = expand_call (exp, target, ignore);
6636 /* Replace the original function just in case it matters. */
6637 TREE_OPERAND (addr, 0) = fndecl;
6639 /* Then issue the arithmetic correction to return the right result. */
6640 if (!ignore)
6642 if (code == NOT)
6644 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6645 OPTAB_LIB_WIDEN);
6646 ret = expand_simple_unop (mode, NOT, ret, target, true);
6648 else
6649 ret = expand_simple_binop (mode, code, ret, val, target, true,
6650 OPTAB_LIB_WIDEN);
6652 return ret;
6655 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6657 void
6658 expand_ifn_atomic_bit_test_and (gcall *call)
6660 tree ptr = gimple_call_arg (call, 0);
6661 tree bit = gimple_call_arg (call, 1);
6662 tree flag = gimple_call_arg (call, 2);
6663 tree lhs = gimple_call_lhs (call);
6664 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6665 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6666 enum rtx_code code;
6667 optab optab;
6668 class expand_operand ops[5];
6670 gcc_assert (flag_inline_atomics);
6672 if (gimple_call_num_args (call) == 4)
6673 model = get_memmodel (gimple_call_arg (call, 3));
6675 rtx mem = get_builtin_sync_mem (ptr, mode);
6676 rtx val = expand_expr_force_mode (bit, mode);
6678 switch (gimple_call_internal_fn (call))
6680 case IFN_ATOMIC_BIT_TEST_AND_SET:
6681 code = IOR;
6682 optab = atomic_bit_test_and_set_optab;
6683 break;
6684 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6685 code = XOR;
6686 optab = atomic_bit_test_and_complement_optab;
6687 break;
6688 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6689 code = AND;
6690 optab = atomic_bit_test_and_reset_optab;
6691 break;
6692 default:
6693 gcc_unreachable ();
6696 if (lhs == NULL_TREE)
6698 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6699 val, NULL_RTX, true, OPTAB_DIRECT);
6700 if (code == AND)
6701 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6702 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6703 return;
6706 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6707 enum insn_code icode = direct_optab_handler (optab, mode);
6708 gcc_assert (icode != CODE_FOR_nothing);
6709 create_output_operand (&ops[0], target, mode);
6710 create_fixed_operand (&ops[1], mem);
6711 create_convert_operand_to (&ops[2], val, mode, true);
6712 create_integer_operand (&ops[3], model);
6713 create_integer_operand (&ops[4], integer_onep (flag));
6714 if (maybe_expand_insn (icode, 5, ops))
6715 return;
6717 rtx bitval = val;
6718 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6719 val, NULL_RTX, true, OPTAB_DIRECT);
6720 rtx maskval = val;
6721 if (code == AND)
6722 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6723 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6724 code, model, false);
6725 if (integer_onep (flag))
6727 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6728 NULL_RTX, true, OPTAB_DIRECT);
6729 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6730 true, OPTAB_DIRECT);
6732 else
6733 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6734 OPTAB_DIRECT);
6735 if (result != target)
6736 emit_move_insn (target, result);
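/* The internal function expanded above is matched by earlier gimple
   passes from source idioms such as (illustrative):

     mask = 1u << bit;
     old  = __atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST) & mask;
*/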
6739 /* Expand an atomic clear operation.
6740 void _atomic_clear (BOOL *obj, enum memmodel)
6741 EXP is the call expression. */
6743 static rtx
6744 expand_builtin_atomic_clear (tree exp)
6746 machine_mode mode;
6747 rtx mem, ret;
6748 enum memmodel model;
6750 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6751 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6752 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6754 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6756 location_t loc
6757 = expansion_point_location_if_in_system_header (input_location);
6758 warning_at (loc, OPT_Winvalid_memory_model,
6759 "invalid memory model for %<__atomic_store%>");
6760 model = MEMMODEL_SEQ_CST;
6763 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6764 Failing that, a store is issued by __atomic_store. The only way this can
6765 fail is if the bool type is larger than a word size. Unlikely, but
6766 handle it anyway for completeness. Assume a single threaded model since
6767 there is no atomic support in this case, and no barriers are required. */
6768 ret = expand_atomic_store (mem, const0_rtx, model, true);
6769 if (!ret)
6770 emit_move_insn (mem, const0_rtx);
6771 return const0_rtx;
6774 /* Expand an atomic test_and_set operation.
6775 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6776 EXP is the call expression. */
6778 static rtx
6779 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6781 rtx mem;
6782 enum memmodel model;
6783 machine_mode mode;
6785 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6786 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6787 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6789 return expand_atomic_test_and_set (target, mem, model);
6793 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6794 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6796 static tree
6797 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6799 int size;
6800 machine_mode mode;
6801 unsigned int mode_align, type_align;
6803 if (TREE_CODE (arg0) != INTEGER_CST)
6804 return NULL_TREE;
6806 /* We need a corresponding integer mode for the access to be lock-free. */
6807 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6808 if (!int_mode_for_size (size, 0).exists (&mode))
6809 return boolean_false_node;
6811 mode_align = GET_MODE_ALIGNMENT (mode);
6813 if (TREE_CODE (arg1) == INTEGER_CST)
6815 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6817 /* Either this argument is null, or it's a fake pointer encoding
6818 the alignment of the object. */
6819 val = least_bit_hwi (val);
6820 val *= BITS_PER_UNIT;
6822 if (val == 0 || mode_align < val)
6823 type_align = mode_align;
6824 else
6825 type_align = val;
6827 else
6829 tree ttype = TREE_TYPE (arg1);
6831 /* This function is usually invoked and folded immediately by the front
6832 end before anything else has a chance to look at it. The pointer
6833 parameter at this point is usually cast to a void *, so check for that
6834 and look past the cast. */
6835 if (CONVERT_EXPR_P (arg1)
6836 && POINTER_TYPE_P (ttype)
6837 && VOID_TYPE_P (TREE_TYPE (ttype))
6838 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6839 arg1 = TREE_OPERAND (arg1, 0);
6841 ttype = TREE_TYPE (arg1);
6842 gcc_assert (POINTER_TYPE_P (ttype));
6844 /* Get the underlying type of the object. */
6845 ttype = TREE_TYPE (ttype);
6846 type_align = TYPE_ALIGN (ttype);
6849 /* If the object has smaller alignment, the lock free routines cannot
6850 be used. */
6851 if (type_align < mode_align)
6852 return boolean_false_node;
6854 /* Check if a compare_and_swap pattern exists for the mode which represents
6855 the required size. The pattern is not allowed to fail, so the existence
6856 of the pattern indicates support is present. Also require that an
6857 atomic load exists for the required size. */
6858 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6859 return boolean_true_node;
6860 else
6861 return boolean_false_node;
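/* Illustration: with the folding above,

     __atomic_always_lock_free (sizeof (int), 0)

   resolves at compile time: true iff an integer mode of that size exists,
   the alignment suffices, and the target provides both a compare-and-swap
   pattern and an atomic load for that mode.  */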
6864 /* Return true if the parameters to call EXP represent an object which will
6865 always generate lock free instructions. The first argument represents the
6866 size of the object, and the second parameter is a pointer to the object
6867 itself. If NULL is passed for the object, then the result is based on
6868 typical alignment for an object of the specified size. Otherwise return
6869 false. */
6871 static rtx
6872 expand_builtin_atomic_always_lock_free (tree exp)
6874 tree size;
6875 tree arg0 = CALL_EXPR_ARG (exp, 0);
6876 tree arg1 = CALL_EXPR_ARG (exp, 1);
6878 if (TREE_CODE (arg0) != INTEGER_CST)
6880 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6881 return const0_rtx;
6884 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6885 if (size == boolean_true_node)
6886 return const1_rtx;
6887 return const0_rtx;
6890 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6891 is lock free on this architecture. */
6893 static tree
6894 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6896 if (!flag_inline_atomics)
6897 return NULL_TREE;
6899 /* If it isn't always lock free, don't generate a result. */
6900 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6901 return boolean_true_node;
6903 return NULL_TREE;
6906 /* Return true if the parameters to call EXP represent an object which will
6907 always generate lock free instructions. The first argument represents the
6908 size of the object, and the second parameter is a pointer to the object
6909 itself. If NULL is passed for the object, then the result is based on
6910 typical alignment for an object of the specified size. Otherwise return
6911 NULL. */
6913 static rtx
6914 expand_builtin_atomic_is_lock_free (tree exp)
6916 tree size;
6917 tree arg0 = CALL_EXPR_ARG (exp, 0);
6918 tree arg1 = CALL_EXPR_ARG (exp, 1);
6920 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6922 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6923 return NULL_RTX;
6926 if (!flag_inline_atomics)
6927 return NULL_RTX;
6929 /* If the value is known at compile time, return the RTX for it. */
6930 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6931 if (size == boolean_true_node)
6932 return const1_rtx;
6934 return NULL_RTX;
6937 /* Expand the __atomic_thread_fence intrinsic:
6938 void __atomic_thread_fence (enum memmodel)
6939 EXP is the CALL_EXPR. */
6941 static void
6942 expand_builtin_atomic_thread_fence (tree exp)
6944 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6945 expand_mem_thread_fence (model);
6948 /* Expand the __atomic_signal_fence intrinsic:
6949 void __atomic_signal_fence (enum memmodel)
6950 EXP is the CALL_EXPR. */
6952 static void
6953 expand_builtin_atomic_signal_fence (tree exp)
6955 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6956 expand_mem_signal_fence (model);
6959 /* Expand the __sync_synchronize intrinsic. */
6961 static void
6962 expand_builtin_sync_synchronize (void)
6964 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6967 static rtx
6968 expand_builtin_thread_pointer (tree exp, rtx target)
6970 enum insn_code icode;
6971 if (!validate_arglist (exp, VOID_TYPE))
6972 return const0_rtx;
6973 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6974 if (icode != CODE_FOR_nothing)
6976 class expand_operand op;
6977 /* If the target is not suitable then create a new target. */
6978 if (target == NULL_RTX
6979 || !REG_P (target)
6980 || GET_MODE (target) != Pmode)
6981 target = gen_reg_rtx (Pmode);
6982 create_output_operand (&op, target, Pmode);
6983 expand_insn (icode, 1, &op);
6984 return target;
6986 error ("%<__builtin_thread_pointer%> is not supported on this target");
6987 return const0_rtx;
6990 static void
6991 expand_builtin_set_thread_pointer (tree exp)
6993 enum insn_code icode;
6994 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6995 return;
6996 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6997 if (icode != CODE_FOR_nothing)
6999 class expand_operand op;
7000 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7001 Pmode, EXPAND_NORMAL);
7002 create_input_operand (&op, val, Pmode);
7003 expand_insn (icode, 1, &op);
7004 return;
7006 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7010 /* Emit code to restore the current value of stack. */
7012 static void
7013 expand_stack_restore (tree var)
7015 rtx_insn *prev;
7016 rtx sa = expand_normal (var);
7018 sa = convert_memory_address (Pmode, sa);
7020 prev = get_last_insn ();
7021 emit_stack_restore (SAVE_BLOCK, sa);
7023 record_new_stack_level ();
7025 fixup_args_size_notes (prev, get_last_insn (), 0);
7028 /* Emit code to save the current value of stack. */
7030 static rtx
7031 expand_stack_save (void)
7033 rtx ret = NULL_RTX;
7035 emit_stack_save (SAVE_BLOCK, &ret);
7036 return ret;
7039 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7041 static rtx
7042 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7044 const char *name;
7045 rtx fallback_retval;
7046 rtx_insn *(*gen_fn) (rtx, rtx);
7047 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7049 case BUILT_IN_GOACC_PARLEVEL_ID:
7050 name = "__builtin_goacc_parlevel_id";
7051 fallback_retval = const0_rtx;
7052 gen_fn = targetm.gen_oacc_dim_pos;
7053 break;
7054 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7055 name = "__builtin_goacc_parlevel_size";
7056 fallback_retval = const1_rtx;
7057 gen_fn = targetm.gen_oacc_dim_size;
7058 break;
7059 default:
7060 gcc_unreachable ();
7063 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7065 error ("%qs only supported in OpenACC code", name);
7066 return const0_rtx;
7069 tree arg = CALL_EXPR_ARG (exp, 0);
7070 if (TREE_CODE (arg) != INTEGER_CST)
7072 error ("non-constant argument 0 to %qs", name);
7073 return const0_rtx;
7076 int dim = TREE_INT_CST_LOW (arg);
7077 switch (dim)
7079 case GOMP_DIM_GANG:
7080 case GOMP_DIM_WORKER:
7081 case GOMP_DIM_VECTOR:
7082 break;
7083 default:
7084 error ("illegal argument 0 to %qs", name);
7085 return const0_rtx;
7088 if (ignore)
7089 return target;
7091 if (target == NULL_RTX)
7092 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7094 if (!targetm.have_oacc_dim_size ())
7096 emit_move_insn (target, fallback_retval);
7097 return target;
7100 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7101 emit_insn (gen_fn (reg, GEN_INT (dim)));
7102 if (reg != target)
7103 emit_move_insn (target, reg);
7105 return target;
7108 /* Expand a string compare operation using a sequence of char comparisons
7109 to get rid of the call overhead, with the result going to TARGET if
7110 that's convenient.
7112 VAR_STR is the variable string source;
7113 CONST_STR is the constant string source;
7114 LENGTH is the number of chars to compare;
7115 CONST_STR_N indicates which source string is the constant string;
7116 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7118 The expanded code will be (assuming const_str_n is 2, i.e., arg2 is the constant string):
7120 target = (int) (unsigned char) var_str[0]
7121 - (int) (unsigned char) const_str[0];
7122 if (target != 0)
7123 goto ne_label;
...
7125 target = (int) (unsigned char) var_str[length - 2]
7126 - (int) (unsigned char) const_str[length - 2];
7127 if (target != 0)
7128 goto ne_label;
7129 target = (int) (unsigned char) var_str[length - 1]
7130 - (int) (unsigned char) const_str[length - 1];
7131 ne_label:
7132 */
7134 static rtx
7135 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7136 unsigned HOST_WIDE_INT length,
7137 int const_str_n, machine_mode mode)
7139 HOST_WIDE_INT offset = 0;
7140 rtx var_rtx_array
7141 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7142 rtx var_rtx = NULL_RTX;
7143 rtx const_rtx = NULL_RTX;
7144 rtx result = target ? target : gen_reg_rtx (mode);
7145 rtx_code_label *ne_label = gen_label_rtx ();
7146 tree unit_type_node = unsigned_char_type_node;
7147 scalar_int_mode unit_mode
7148 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7150 start_sequence ();
7152 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7154 var_rtx
7155 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7156 const_rtx = c_readstr (const_str + offset, unit_mode);
7157 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7158 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7160 op0 = convert_modes (mode, unit_mode, op0, 1);
7161 op1 = convert_modes (mode, unit_mode, op1, 1);
7162 result = expand_simple_binop (mode, MINUS, op0, op1,
7163 result, 1, OPTAB_WIDEN);
7164 if (i < length - 1)
7165 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7166 mode, true, ne_label);
7167 offset += GET_MODE_SIZE (unit_mode);
7170 emit_label (ne_label);
7171 rtx_insn *insns = get_insns ();
7172 end_sequence ();
7173 emit_insn (insns);
7175 return result;
7178 /* Inline expansion of a call to str(n)cmp, with the result going to
7179 TARGET if that's convenient.
7180 If the call is not inlined, return NULL_RTX. */
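/* As an illustration: with -O2 and the default value (3) of
   --param builtin-string-cmp-inline-length,

     int f (const char *s) { return __builtin_strcmp (s, "ab"); }

   qualifies (the constant string is 3 bytes counting its terminating NUL)
   and is expanded via inline_string_cmp above, while a longer constant
   string remains a library call.  */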
7181 static rtx
7182 inline_expand_builtin_string_cmp (tree exp, rtx target)
7184 tree fndecl = get_callee_fndecl (exp);
7185 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7186 unsigned HOST_WIDE_INT length = 0;
7187 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7189 /* Do NOT apply this inline expansion when optimizing for size or
7190 when the optimization level is below 2. */
7191 if (optimize < 2 || optimize_insn_for_size_p ())
7192 return NULL_RTX;
7194 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7195 || fcode == BUILT_IN_STRNCMP
7196 || fcode == BUILT_IN_MEMCMP);
7198 /* On a target where the type of the call (int) has the same or narrower
7199 precision than unsigned char, give up on the inline expansion. */
7200 if (TYPE_PRECISION (unsigned_char_type_node)
7201 >= TYPE_PRECISION (TREE_TYPE (exp)))
7202 return NULL_RTX;
7204 tree arg1 = CALL_EXPR_ARG (exp, 0);
7205 tree arg2 = CALL_EXPR_ARG (exp, 1);
7206 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7208 unsigned HOST_WIDE_INT len1 = 0;
7209 unsigned HOST_WIDE_INT len2 = 0;
7210 unsigned HOST_WIDE_INT len3 = 0;
7212 const char *src_str1 = c_getstr (arg1, &len1);
7213 const char *src_str2 = c_getstr (arg2, &len2);
7215 /* If neither string is a constant string, the call does not qualify. */
7216 if (!src_str1 && !src_str2)
7217 return NULL_RTX;
7219 /* For strncmp, if the length is not a constant, the call does not qualify. */
7220 if (is_ncmp)
7222 if (!tree_fits_uhwi_p (len3_tree))
7223 return NULL_RTX;
7224 else
7225 len3 = tree_to_uhwi (len3_tree);
7228 if (src_str1 != NULL)
7229 len1 = strnlen (src_str1, len1) + 1;
7231 if (src_str2 != NULL)
7232 len2 = strnlen (src_str2, len2) + 1;
7234 int const_str_n = 0;
7235 if (!len1)
7236 const_str_n = 2;
7237 else if (!len2)
7238 const_str_n = 1;
7239 else if (len2 > len1)
7240 const_str_n = 1;
7241 else
7242 const_str_n = 2;
7244 gcc_checking_assert (const_str_n > 0);
7245 length = (const_str_n == 1) ? len1 : len2;
7247 if (is_ncmp && len3 < length)
7248 length = len3;
7250 /* If the length of the comparison is larger than the threshold,
7251 do nothing. */
7252 if (length > (unsigned HOST_WIDE_INT)
7253 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7254 return NULL_RTX;
7256 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7258 /* Now expand the call inline. */
7259 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7260 (const_str_n == 1) ? src_str1 : src_str2, length,
7261 const_str_n, mode);
7264 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7265 represents the size of the first argument to that call, or VOIDmode
7266 if the argument is a pointer. IGNORE will be true if the result
7267 isn't used. */
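/* A typical use is hardening a bounds-checked load against speculative
   execution past a mispredicted branch, as a sketch (ARR and N are
   placeholders):

     if (i < N)
       x = ARR[__builtin_speculation_safe_value (i)];  */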
7268 static rtx
7269 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7270 bool ignore)
7272 rtx val, failsafe;
7273 unsigned nargs = call_expr_nargs (exp);
7275 tree arg0 = CALL_EXPR_ARG (exp, 0);
7277 if (mode == VOIDmode)
7279 mode = TYPE_MODE (TREE_TYPE (arg0));
7280 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7283 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7285 /* An optional second argument can be used as a failsafe value on
7286 some machines. If it isn't present, then the failsafe value is
7287 assumed to be 0. */
7288 if (nargs > 1)
7290 tree arg1 = CALL_EXPR_ARG (exp, 1);
7291 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7293 else
7294 failsafe = const0_rtx;
7296 /* If the result isn't used, the behavior is undefined. It would be
7297 nice to emit a warning here, but path splitting means this might
7298 happen with legitimate code. So simply drop the builtin
7299 expansion in that case; we've handled any side-effects above. */
7300 if (ignore)
7301 return const0_rtx;
7303 /* If we don't have a suitable target, create one to hold the result. */
7304 if (target == NULL || GET_MODE (target) != mode)
7305 target = gen_reg_rtx (mode);
7307 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7308 val = convert_modes (mode, VOIDmode, val, false);
7310 return targetm.speculation_safe_value (mode, target, val, failsafe);
7313 /* Expand an expression EXP that calls a built-in function,
7314 with result going to TARGET if that's convenient
7315 (and in mode MODE if that's convenient).
7316 SUBTARGET may be used as the target for computing one of EXP's operands.
7317 IGNORE is nonzero if the value is to be ignored. */
7319 rtx
7320 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7321 int ignore)
7323 tree fndecl = get_callee_fndecl (exp);
7324 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7325 int flags;
7327 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7328 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7330 /* When ASan is enabled, we don't want to expand some memory/string
7331 builtins and rely on libsanitizer's hooks. This allows us to avoid
7332 redundant checks and be sure that a possible overflow will be detected
7333 by ASan. */
7335 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7336 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7337 return expand_call (exp, target, ignore);
7339 /* When not optimizing, generate calls to library functions for a certain
7340 set of builtins. */
7341 if (!optimize
7342 && !called_as_built_in (fndecl)
7343 && fcode != BUILT_IN_FORK
7344 && fcode != BUILT_IN_EXECL
7345 && fcode != BUILT_IN_EXECV
7346 && fcode != BUILT_IN_EXECLP
7347 && fcode != BUILT_IN_EXECLE
7348 && fcode != BUILT_IN_EXECVP
7349 && fcode != BUILT_IN_EXECVE
7350 && !ALLOCA_FUNCTION_CODE_P (fcode)
7351 && fcode != BUILT_IN_FREE)
7352 return expand_call (exp, target, ignore);
7354 /* The built-in function expanders test for target == const0_rtx
7355 to determine whether the function's result will be ignored. */
7356 if (ignore)
7357 target = const0_rtx;
7359 /* If the result of a pure or const built-in function is ignored, and
7360 none of its arguments are volatile, we can avoid expanding the
7361 built-in call and just evaluate the arguments for side-effects. */
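  /* For example, "(void) __builtin_labs (x++);" reduces to just the side
     effect "x++".  */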
7362 if (target == const0_rtx
7363 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7364 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7366 bool volatilep = false;
7367 tree arg;
7368 call_expr_arg_iterator iter;
7370 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7371 if (TREE_THIS_VOLATILE (arg))
7373 volatilep = true;
7374 break;
7377 if (! volatilep)
7379 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7380 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7381 return const0_rtx;
7385 switch (fcode)
7387 CASE_FLT_FN (BUILT_IN_FABS):
7388 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7389 case BUILT_IN_FABSD32:
7390 case BUILT_IN_FABSD64:
7391 case BUILT_IN_FABSD128:
7392 target = expand_builtin_fabs (exp, target, subtarget);
7393 if (target)
7394 return target;
7395 break;
7397 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7398 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7399 target = expand_builtin_copysign (exp, target, subtarget);
7400 if (target)
7401 return target;
7402 break;
7404 /* Just do a normal library call if we were unable to fold
7405 the values. */
7406 CASE_FLT_FN (BUILT_IN_CABS):
7407 break;
7409 CASE_FLT_FN (BUILT_IN_FMA):
7410 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7411 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7412 if (target)
7413 return target;
7414 break;
7416 CASE_FLT_FN (BUILT_IN_ILOGB):
7417 if (! flag_unsafe_math_optimizations)
7418 break;
7419 gcc_fallthrough ();
7420 CASE_FLT_FN (BUILT_IN_ISINF):
7421 CASE_FLT_FN (BUILT_IN_FINITE):
7422 case BUILT_IN_ISFINITE:
7423 case BUILT_IN_ISNORMAL:
7424 target = expand_builtin_interclass_mathfn (exp, target);
7425 if (target)
7426 return target;
7427 break;
7429 CASE_FLT_FN (BUILT_IN_ICEIL):
7430 CASE_FLT_FN (BUILT_IN_LCEIL):
7431 CASE_FLT_FN (BUILT_IN_LLCEIL):
7432 CASE_FLT_FN (BUILT_IN_LFLOOR):
7433 CASE_FLT_FN (BUILT_IN_IFLOOR):
7434 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7435 target = expand_builtin_int_roundingfn (exp, target);
7436 if (target)
7437 return target;
7438 break;
7440 CASE_FLT_FN (BUILT_IN_IRINT):
7441 CASE_FLT_FN (BUILT_IN_LRINT):
7442 CASE_FLT_FN (BUILT_IN_LLRINT):
7443 CASE_FLT_FN (BUILT_IN_IROUND):
7444 CASE_FLT_FN (BUILT_IN_LROUND):
7445 CASE_FLT_FN (BUILT_IN_LLROUND):
7446 target = expand_builtin_int_roundingfn_2 (exp, target);
7447 if (target)
7448 return target;
7449 break;
7451 CASE_FLT_FN (BUILT_IN_POWI):
7452 target = expand_builtin_powi (exp, target);
7453 if (target)
7454 return target;
7455 break;
7457 CASE_FLT_FN (BUILT_IN_CEXPI):
7458 target = expand_builtin_cexpi (exp, target);
7459 gcc_assert (target);
7460 return target;
7462 CASE_FLT_FN (BUILT_IN_SIN):
7463 CASE_FLT_FN (BUILT_IN_COS):
7464 if (! flag_unsafe_math_optimizations)
7465 break;
7466 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7467 if (target)
7468 return target;
7469 break;
7471 CASE_FLT_FN (BUILT_IN_SINCOS):
7472 if (! flag_unsafe_math_optimizations)
7473 break;
7474 target = expand_builtin_sincos (exp);
7475 if (target)
7476 return target;
7477 break;
7479 case BUILT_IN_APPLY_ARGS:
7480 return expand_builtin_apply_args ();
7482 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7483 FUNCTION with a copy of the parameters described by
7484 ARGUMENTS, and ARGSIZE. It returns a block of memory
7485 allocated on the stack into which is stored all the registers
7486 that might possibly be used for returning the result of a
7487 function. ARGUMENTS is the value returned by
7488 __builtin_apply_args. ARGSIZE is the number of bytes of
7489 arguments that must be copied. ??? How should this value be
7490 computed? We'll also need a safe worst case value for varargs
7491 functions. */
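/* A sketch of the intended use, where SIZE is a caller-supplied
   worst-case byte count for the argument block:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply (fn, args, SIZE);
     __builtin_return (ret);  */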
7492 case BUILT_IN_APPLY:
7493 if (!validate_arglist (exp, POINTER_TYPE,
7494 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7495 && !validate_arglist (exp, REFERENCE_TYPE,
7496 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7497 return const0_rtx;
7498 else
7500 rtx ops[3];
7502 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7503 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7504 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7506 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7509 /* __builtin_return (RESULT) causes the function to return the
7510 value described by RESULT. RESULT is address of the block of
7511 memory returned by __builtin_apply. */
7512 case BUILT_IN_RETURN:
7513 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7514 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7515 return const0_rtx;
7517 case BUILT_IN_SAVEREGS:
7518 return expand_builtin_saveregs ();
7520 case BUILT_IN_VA_ARG_PACK:
7521 /* All valid uses of __builtin_va_arg_pack () are removed during
7522 inlining. */
7523 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7524 return const0_rtx;
7526 case BUILT_IN_VA_ARG_PACK_LEN:
7527 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7528 inlining. */
7529 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7530 return const0_rtx;
7532 /* Return the address of the first anonymous stack arg. */
7533 case BUILT_IN_NEXT_ARG:
7534 if (fold_builtin_next_arg (exp, false))
7535 return const0_rtx;
7536 return expand_builtin_next_arg ();
7538 case BUILT_IN_CLEAR_CACHE:
7539 target = expand_builtin___clear_cache (exp);
7540 if (target)
7541 return target;
7542 break;
7544 case BUILT_IN_CLASSIFY_TYPE:
7545 return expand_builtin_classify_type (exp);
7547 case BUILT_IN_CONSTANT_P:
7548 return const0_rtx;
7550 case BUILT_IN_FRAME_ADDRESS:
7551 case BUILT_IN_RETURN_ADDRESS:
7552 return expand_builtin_frame_address (fndecl, exp);
7554 /* Returns the address of the area where the structure is returned,
7555 or 0 otherwise. */
7556 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7557 if (call_expr_nargs (exp) != 0
7558 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7559 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7560 return const0_rtx;
7561 else
7562 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7564 CASE_BUILT_IN_ALLOCA:
7565 target = expand_builtin_alloca (exp);
7566 if (target)
7567 return target;
7568 break;
7570 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7571 return expand_asan_emit_allocas_unpoison (exp);
7573 case BUILT_IN_STACK_SAVE:
7574 return expand_stack_save ();
7576 case BUILT_IN_STACK_RESTORE:
7577 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7578 return const0_rtx;
7580 case BUILT_IN_BSWAP16:
7581 case BUILT_IN_BSWAP32:
7582 case BUILT_IN_BSWAP64:
7583 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7584 if (target)
7585 return target;
7586 break;
7588 CASE_INT_FN (BUILT_IN_FFS):
7589 target = expand_builtin_unop (target_mode, exp, target,
7590 subtarget, ffs_optab);
7591 if (target)
7592 return target;
7593 break;
7595 CASE_INT_FN (BUILT_IN_CLZ):
7596 target = expand_builtin_unop (target_mode, exp, target,
7597 subtarget, clz_optab);
7598 if (target)
7599 return target;
7600 break;
7602 CASE_INT_FN (BUILT_IN_CTZ):
7603 target = expand_builtin_unop (target_mode, exp, target,
7604 subtarget, ctz_optab);
7605 if (target)
7606 return target;
7607 break;
7609 CASE_INT_FN (BUILT_IN_CLRSB):
7610 target = expand_builtin_unop (target_mode, exp, target,
7611 subtarget, clrsb_optab);
7612 if (target)
7613 return target;
7614 break;
7616 CASE_INT_FN (BUILT_IN_POPCOUNT):
7617 target = expand_builtin_unop (target_mode, exp, target,
7618 subtarget, popcount_optab);
7619 if (target)
7620 return target;
7621 break;
7623 CASE_INT_FN (BUILT_IN_PARITY):
7624 target = expand_builtin_unop (target_mode, exp, target,
7625 subtarget, parity_optab);
7626 if (target)
7627 return target;
7628 break;
7630 case BUILT_IN_STRLEN:
7631 target = expand_builtin_strlen (exp, target, target_mode);
7632 if (target)
7633 return target;
7634 break;
7636 case BUILT_IN_STRNLEN:
7637 target = expand_builtin_strnlen (exp, target, target_mode);
7638 if (target)
7639 return target;
7640 break;
7642 case BUILT_IN_STRCAT:
7643 target = expand_builtin_strcat (exp, target);
7644 if (target)
7645 return target;
7646 break;
7648 case BUILT_IN_STRCPY:
7649 target = expand_builtin_strcpy (exp, target);
7650 if (target)
7651 return target;
7652 break;
7654 case BUILT_IN_STRNCAT:
7655 target = expand_builtin_strncat (exp, target);
7656 if (target)
7657 return target;
7658 break;
7660 case BUILT_IN_STRNCPY:
7661 target = expand_builtin_strncpy (exp, target);
7662 if (target)
7663 return target;
7664 break;
7666 case BUILT_IN_STPCPY:
7667 target = expand_builtin_stpcpy (exp, target, mode);
7668 if (target)
7669 return target;
7670 break;
7672 case BUILT_IN_STPNCPY:
7673 target = expand_builtin_stpncpy (exp, target);
7674 if (target)
7675 return target;
7676 break;
7678 case BUILT_IN_MEMCHR:
7679 target = expand_builtin_memchr (exp, target);
7680 if (target)
7681 return target;
7682 break;
7684 case BUILT_IN_MEMCPY:
7685 target = expand_builtin_memcpy (exp, target);
7686 if (target)
7687 return target;
7688 break;
7690 case BUILT_IN_MEMMOVE:
7691 target = expand_builtin_memmove (exp, target);
7692 if (target)
7693 return target;
7694 break;
7696 case BUILT_IN_MEMPCPY:
7697 target = expand_builtin_mempcpy (exp, target);
7698 if (target)
7699 return target;
7700 break;
7702 case BUILT_IN_MEMSET:
7703 target = expand_builtin_memset (exp, target, mode);
7704 if (target)
7705 return target;
7706 break;
7708 case BUILT_IN_BZERO:
7709 target = expand_builtin_bzero (exp);
7710 if (target)
7711 return target;
7712 break;
7714 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7715 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7716 when changing it to a strcmp call. */
7717 case BUILT_IN_STRCMP_EQ:
7718 target = expand_builtin_memcmp (exp, target, true);
7719 if (target)
7720 return target;
7722 /* Change this call back to a BUILT_IN_STRCMP. */
7723 TREE_OPERAND (exp, 1)
7724 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7726 /* Delete the last parameter. */
7727 unsigned int i;
7728 vec<tree, va_gc> *arg_vec;
7729 vec_alloc (arg_vec, 2);
7730 for (i = 0; i < 2; i++)
7731 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7732 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7733 /* FALLTHROUGH */
7735 case BUILT_IN_STRCMP:
7736 target = expand_builtin_strcmp (exp, target);
7737 if (target)
7738 return target;
7739 break;
7741 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7742 back to a BUILT_IN_STRNCMP. */
7743 case BUILT_IN_STRNCMP_EQ:
7744 target = expand_builtin_memcmp (exp, target, true);
7745 if (target)
7746 return target;
7748 /* Change it back to a BUILT_IN_STRNCMP. */
7749 TREE_OPERAND (exp, 1)
7750 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7751 /* FALLTHROUGH */
7753 case BUILT_IN_STRNCMP:
7754 target = expand_builtin_strncmp (exp, target, mode);
7755 if (target)
7756 return target;
7757 break;
7759 case BUILT_IN_BCMP:
7760 case BUILT_IN_MEMCMP:
7761 case BUILT_IN_MEMCMP_EQ:
7762 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7763 if (target)
7764 return target;
7765 if (fcode == BUILT_IN_MEMCMP_EQ)
7767 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7768 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7770 break;
7772 case BUILT_IN_SETJMP:
7773 /* This should have been lowered to the builtins below. */
7774 gcc_unreachable ();
7776 case BUILT_IN_SETJMP_SETUP:
7777 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7778 and the receiver label. */
7779 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7781 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7782 VOIDmode, EXPAND_NORMAL);
7783 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7784 rtx_insn *label_r = label_rtx (label);
7786 /* This is copied from the handling of non-local gotos. */
7787 expand_builtin_setjmp_setup (buf_addr, label_r);
7788 nonlocal_goto_handler_labels
7789 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7790 nonlocal_goto_handler_labels);
7791 /* ??? Do not let expand_label treat us as such since we would
7792 not want to be both on the list of non-local labels and on
7793 the list of forced labels. */
7794 FORCED_LABEL (label) = 0;
7795 return const0_rtx;
7797 break;
7799 case BUILT_IN_SETJMP_RECEIVER:
7800 /* __builtin_setjmp_receiver is passed the receiver label. */
7801 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7803 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7804 rtx_insn *label_r = label_rtx (label);
7806 expand_builtin_setjmp_receiver (label_r);
7807 return const0_rtx;
7809 break;
7811 /* __builtin_longjmp is passed a pointer to an array of five words.
7812 It's similar to the C library longjmp function but works with
7813 __builtin_setjmp above. */
7814 case BUILT_IN_LONGJMP:
7815 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7817 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7818 VOIDmode, EXPAND_NORMAL);
7819 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7821 if (value != const1_rtx)
7823 error ("%<__builtin_longjmp%> second argument must be 1");
7824 return const0_rtx;
7827 expand_builtin_longjmp (buf_addr, value);
7828 return const0_rtx;
7830 break;
7832 case BUILT_IN_NONLOCAL_GOTO:
7833 target = expand_builtin_nonlocal_goto (exp);
7834 if (target)
7835 return target;
7836 break;
7838 /* This updates the setjmp buffer that is its argument with the value
7839 of the current stack pointer. */
7840 case BUILT_IN_UPDATE_SETJMP_BUF:
7841 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7843 rtx buf_addr
7844 = expand_normal (CALL_EXPR_ARG (exp, 0));
7846 expand_builtin_update_setjmp_buf (buf_addr);
7847 return const0_rtx;
7849 break;
7851 case BUILT_IN_TRAP:
7852 expand_builtin_trap ();
7853 return const0_rtx;
7855 case BUILT_IN_UNREACHABLE:
7856 expand_builtin_unreachable ();
7857 return const0_rtx;
7859 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7860 case BUILT_IN_SIGNBITD32:
7861 case BUILT_IN_SIGNBITD64:
7862 case BUILT_IN_SIGNBITD128:
7863 target = expand_builtin_signbit (exp, target);
7864 if (target)
7865 return target;
7866 break;
7868 /* Various hooks for the DWARF 2 __throw routine. */
7869 case BUILT_IN_UNWIND_INIT:
7870 expand_builtin_unwind_init ();
7871 return const0_rtx;
7872 case BUILT_IN_DWARF_CFA:
7873 return virtual_cfa_rtx;
7874 #ifdef DWARF2_UNWIND_INFO
7875 case BUILT_IN_DWARF_SP_COLUMN:
7876 return expand_builtin_dwarf_sp_column ();
7877 case BUILT_IN_INIT_DWARF_REG_SIZES:
7878 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7879 return const0_rtx;
7880 #endif
7881 case BUILT_IN_FROB_RETURN_ADDR:
7882 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7883 case BUILT_IN_EXTRACT_RETURN_ADDR:
7884 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7885 case BUILT_IN_EH_RETURN:
7886 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7887 CALL_EXPR_ARG (exp, 1));
7888 return const0_rtx;
7889 case BUILT_IN_EH_RETURN_DATA_REGNO:
7890 return expand_builtin_eh_return_data_regno (exp);
7891 case BUILT_IN_EXTEND_POINTER:
7892 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7893 case BUILT_IN_EH_POINTER:
7894 return expand_builtin_eh_pointer (exp);
7895 case BUILT_IN_EH_FILTER:
7896 return expand_builtin_eh_filter (exp);
7897 case BUILT_IN_EH_COPY_VALUES:
7898 return expand_builtin_eh_copy_values (exp);
7900 case BUILT_IN_VA_START:
7901 return expand_builtin_va_start (exp);
7902 case BUILT_IN_VA_END:
7903 return expand_builtin_va_end (exp);
7904 case BUILT_IN_VA_COPY:
7905 return expand_builtin_va_copy (exp);
7906 case BUILT_IN_EXPECT:
7907 return expand_builtin_expect (exp, target);
7908 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7909 return expand_builtin_expect_with_probability (exp, target);
7910 case BUILT_IN_ASSUME_ALIGNED:
7911 return expand_builtin_assume_aligned (exp, target);
7912 case BUILT_IN_PREFETCH:
7913 expand_builtin_prefetch (exp);
7914 return const0_rtx;
7916 case BUILT_IN_INIT_TRAMPOLINE:
7917 return expand_builtin_init_trampoline (exp, true);
7918 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7919 return expand_builtin_init_trampoline (exp, false);
7920 case BUILT_IN_ADJUST_TRAMPOLINE:
7921 return expand_builtin_adjust_trampoline (exp);
7923 case BUILT_IN_INIT_DESCRIPTOR:
7924 return expand_builtin_init_descriptor (exp);
7925 case BUILT_IN_ADJUST_DESCRIPTOR:
7926 return expand_builtin_adjust_descriptor (exp);
7928 case BUILT_IN_FORK:
7929 case BUILT_IN_EXECL:
7930 case BUILT_IN_EXECV:
7931 case BUILT_IN_EXECLP:
7932 case BUILT_IN_EXECLE:
7933 case BUILT_IN_EXECVP:
7934 case BUILT_IN_EXECVE:
7935 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7936 if (target)
7937 return target;
7938 break;
7940 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7941 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7942 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7943 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7944 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7945 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7946 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7947 if (target)
7948 return target;
7949 break;
7951 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7952 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7953 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7954 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7955 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7956 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7957 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7958 if (target)
7959 return target;
7960 break;
7962 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7963 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7964 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7965 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7966 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7967 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7968 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7974 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7975 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7976 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7977 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7978 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7979 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7980 if (target)
7981 return target;
7982 break;
7984 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7985 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7986 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7987 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7988 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7989 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7990 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7991 if (target)
7992 return target;
7993 break;
7995 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7996 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7997 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7998 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7999 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8000 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8001 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8002 if (target)
8003 return target;
8004 break;
8006 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8007 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8008 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8009 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8010 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8012 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8013 if (target)
8014 return target;
8015 break;
8017 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8018 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8019 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8020 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8021 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8022 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8023 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8024 if (target)
8025 return target;
8026 break;
8028 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8029 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8030 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8031 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8032 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8033 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8034 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8035 if (target)
8036 return target;
8037 break;
8039 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8040 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8041 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8042 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8043 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8044 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8045 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8046 if (target)
8047 return target;
8048 break;
8050 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8051 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8052 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8053 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8054 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8055 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8056 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8057 if (target)
8058 return target;
8059 break;
8061 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8062 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8063 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8064 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8065 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8066 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8067 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8068 if (target)
8069 return target;
8070 break;
8072 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8073 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8074 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8075 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8076 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8077 if (mode == VOIDmode)
8078 mode = TYPE_MODE (boolean_type_node);
8079 if (!target || !register_operand (target, mode))
8080 target = gen_reg_rtx (mode);
8082 mode = get_builtin_sync_mode
8083 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8084 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8085 if (target)
8086 return target;
8087 break;
8089 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8090 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8091 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8092 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8093 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8094 mode = get_builtin_sync_mode
8095 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8096 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8097 if (target)
8098 return target;
8099 break;
8101 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8102 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8103 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8104 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8105 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8106 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8107 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8108 if (target)
8109 return target;
8110 break;
8112 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8113 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8114 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8115 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8116 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8117 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8118 expand_builtin_sync_lock_release (mode, exp);
8119 return const0_rtx;
8121 case BUILT_IN_SYNC_SYNCHRONIZE:
8122 expand_builtin_sync_synchronize ();
8123 return const0_rtx;
8125 case BUILT_IN_ATOMIC_EXCHANGE_1:
8126 case BUILT_IN_ATOMIC_EXCHANGE_2:
8127 case BUILT_IN_ATOMIC_EXCHANGE_4:
8128 case BUILT_IN_ATOMIC_EXCHANGE_8:
8129 case BUILT_IN_ATOMIC_EXCHANGE_16:
8130 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8131 target = expand_builtin_atomic_exchange (mode, exp, target);
8132 if (target)
8133 return target;
8134 break;
8136 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8137 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8138 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8139 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8140 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8142 unsigned int nargs, z;
8143 vec<tree, va_gc> *vec;
8145 mode =
8146 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8147 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8148 if (target)
8149 return target;
8151 /* If this is turned into an external library call, the weak parameter
8152 must be dropped to match the expected parameter list. */
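  /* E.g. a 4-byte __atomic_compare_exchange (ptr, expected, desired,
     weak, s_order, f_order) becomes, roughly, the library call
     __atomic_compare_exchange_4 (ptr, expected, desired, s_order,
     f_order).  */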
8153 nargs = call_expr_nargs (exp);
8154 vec_alloc (vec, nargs - 1);
8155 for (z = 0; z < 3; z++)
8156 vec->quick_push (CALL_EXPR_ARG (exp, z));
8157 /* Skip the boolean weak parameter. */
8158 for (z = 4; z < 6; z++)
8159 vec->quick_push (CALL_EXPR_ARG (exp, z));
8160 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8161 break;
8164 case BUILT_IN_ATOMIC_LOAD_1:
8165 case BUILT_IN_ATOMIC_LOAD_2:
8166 case BUILT_IN_ATOMIC_LOAD_4:
8167 case BUILT_IN_ATOMIC_LOAD_8:
8168 case BUILT_IN_ATOMIC_LOAD_16:
8169 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8170 target = expand_builtin_atomic_load (mode, exp, target);
8171 if (target)
8172 return target;
8173 break;
8175 case BUILT_IN_ATOMIC_STORE_1:
8176 case BUILT_IN_ATOMIC_STORE_2:
8177 case BUILT_IN_ATOMIC_STORE_4:
8178 case BUILT_IN_ATOMIC_STORE_8:
8179 case BUILT_IN_ATOMIC_STORE_16:
8180 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8181 target = expand_builtin_atomic_store (mode, exp);
8182 if (target)
8183 return const0_rtx;
8184 break;
8186 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8187 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8188 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8189 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8190 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8192 enum built_in_function lib;
8193 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8194 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8195 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8196 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8197 ignore, lib);
8198 if (target)
8199 return target;
8200 break;
8202 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8203 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8204 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8205 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8206 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8208 enum built_in_function lib;
8209 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8210 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8211 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8212 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8213 ignore, lib);
8214 if (target)
8215 return target;
8216 break;
8218 case BUILT_IN_ATOMIC_AND_FETCH_1:
8219 case BUILT_IN_ATOMIC_AND_FETCH_2:
8220 case BUILT_IN_ATOMIC_AND_FETCH_4:
8221 case BUILT_IN_ATOMIC_AND_FETCH_8:
8222 case BUILT_IN_ATOMIC_AND_FETCH_16:
8224 enum built_in_function lib;
8225 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8226 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8227 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8228 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8229 ignore, lib);
8230 if (target)
8231 return target;
8232 break;
8234 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8235 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8236 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8237 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8238 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8240 enum built_in_function lib;
8241 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8242 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8243 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8244 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8245 ignore, lib);
8246 if (target)
8247 return target;
8248 break;
8250 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8251 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8252 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8253 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8254 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8256 enum built_in_function lib;
8257 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8258 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8259 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8260 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8261 ignore, lib);
8262 if (target)
8263 return target;
8264 break;
8266 case BUILT_IN_ATOMIC_OR_FETCH_1:
8267 case BUILT_IN_ATOMIC_OR_FETCH_2:
8268 case BUILT_IN_ATOMIC_OR_FETCH_4:
8269 case BUILT_IN_ATOMIC_OR_FETCH_8:
8270 case BUILT_IN_ATOMIC_OR_FETCH_16:
8272 enum built_in_function lib;
8273 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8274 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8275 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8276 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8277 ignore, lib);
8278 if (target)
8279 return target;
8280 break;
8282 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8283 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8284 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8285 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8286 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8287 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8288 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8289 ignore, BUILT_IN_NONE);
8290 if (target)
8291 return target;
8292 break;
8294 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8295 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8296 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8297 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8298 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8299 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8300 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8301 ignore, BUILT_IN_NONE);
8302 if (target)
8303 return target;
8304 break;
8306 case BUILT_IN_ATOMIC_FETCH_AND_1:
8307 case BUILT_IN_ATOMIC_FETCH_AND_2:
8308 case BUILT_IN_ATOMIC_FETCH_AND_4:
8309 case BUILT_IN_ATOMIC_FETCH_AND_8:
8310 case BUILT_IN_ATOMIC_FETCH_AND_16:
8311 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8312 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8313 ignore, BUILT_IN_NONE);
8314 if (target)
8315 return target;
8316 break;
8318 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8319 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8320 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8321 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8322 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8323 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8324 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8325 ignore, BUILT_IN_NONE);
8326 if (target)
8327 return target;
8328 break;
8330 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8331 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8332 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8333 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8334 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8335 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8336 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8337 ignore, BUILT_IN_NONE);
8338 if (target)
8339 return target;
8340 break;
8342 case BUILT_IN_ATOMIC_FETCH_OR_1:
8343 case BUILT_IN_ATOMIC_FETCH_OR_2:
8344 case BUILT_IN_ATOMIC_FETCH_OR_4:
8345 case BUILT_IN_ATOMIC_FETCH_OR_8:
8346 case BUILT_IN_ATOMIC_FETCH_OR_16:
8347 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8348 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8349 ignore, BUILT_IN_NONE);
8350 if (target)
8351 return target;
8352 break;
8354 case BUILT_IN_ATOMIC_TEST_AND_SET:
8355 return expand_builtin_atomic_test_and_set (exp, target);
8357 case BUILT_IN_ATOMIC_CLEAR:
8358 return expand_builtin_atomic_clear (exp);
8360 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8361 return expand_builtin_atomic_always_lock_free (exp);
8363 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8364 target = expand_builtin_atomic_is_lock_free (exp);
8365 if (target)
8366 return target;
8367 break;
8369 case BUILT_IN_ATOMIC_THREAD_FENCE:
8370 expand_builtin_atomic_thread_fence (exp);
8371 return const0_rtx;
8373 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8374 expand_builtin_atomic_signal_fence (exp);
8375 return const0_rtx;
8377 case BUILT_IN_OBJECT_SIZE:
8378 return expand_builtin_object_size (exp);
8380 case BUILT_IN_MEMCPY_CHK:
8381 case BUILT_IN_MEMPCPY_CHK:
8382 case BUILT_IN_MEMMOVE_CHK:
8383 case BUILT_IN_MEMSET_CHK:
8384 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8385 if (target)
8386 return target;
8387 break;
8389 case BUILT_IN_STRCPY_CHK:
8390 case BUILT_IN_STPCPY_CHK:
8391 case BUILT_IN_STRNCPY_CHK:
8392 case BUILT_IN_STPNCPY_CHK:
8393 case BUILT_IN_STRCAT_CHK:
8394 case BUILT_IN_STRNCAT_CHK:
8395 case BUILT_IN_SNPRINTF_CHK:
8396 case BUILT_IN_VSNPRINTF_CHK:
8397 maybe_emit_chk_warning (exp, fcode);
8398 break;
8400 case BUILT_IN_SPRINTF_CHK:
8401 case BUILT_IN_VSPRINTF_CHK:
8402 maybe_emit_sprintf_chk_warning (exp, fcode);
8403 break;
8405 case BUILT_IN_FREE:
8406 if (warn_free_nonheap_object)
8407 maybe_emit_free_warning (exp);
8408 break;
8410 case BUILT_IN_THREAD_POINTER:
8411 return expand_builtin_thread_pointer (exp, target);
8413 case BUILT_IN_SET_THREAD_POINTER:
8414 expand_builtin_set_thread_pointer (exp);
8415 return const0_rtx;
8417 case BUILT_IN_ACC_ON_DEVICE:
8418 /* Do a library call if we failed to expand the builtin when
8419 folding. */
8420 break;
8422 case BUILT_IN_GOACC_PARLEVEL_ID:
8423 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8424 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8426 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8427 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8429 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8430 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8431 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8432 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8433 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8434 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8435 return expand_speculation_safe_value (mode, exp, target, ignore);
8437 default: /* Just do a library call for an unknown builtin. */
8438 break;
8441 /* The switch statement above can drop through to cause the function
8442 to be called normally. */
8443 return expand_call (exp, target, ignore);
8446 /* Determine whether a tree node represents a call to a built-in
8447 function. If the tree T is a call to a built-in function with
8448 the right number of arguments of the appropriate types, return
8449 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8450 Otherwise the return value is END_BUILTINS. */
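/* For instance, for a CALL_EXPR built from "sqrtf (x)" with X of type
   float this returns BUILT_IN_SQRTF, while a mismatched argument type
   (say, a pointer) makes the parameter walk below fail and yields
   END_BUILTINS.  */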
8452 enum built_in_function
8453 builtin_mathfn_code (const_tree t)
8455 const_tree fndecl, arg, parmlist;
8456 const_tree argtype, parmtype;
8457 const_call_expr_arg_iterator iter;
8459 if (TREE_CODE (t) != CALL_EXPR)
8460 return END_BUILTINS;
8462 fndecl = get_callee_fndecl (t);
8463 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8464 return END_BUILTINS;
8466 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8467 init_const_call_expr_arg_iterator (t, &iter);
8468 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8470 /* If a function doesn't take a variable number of arguments,
8471 the last element in the list will have type `void'. */
8472 parmtype = TREE_VALUE (parmlist);
8473 if (VOID_TYPE_P (parmtype))
8475 if (more_const_call_expr_args_p (&iter))
8476 return END_BUILTINS;
8477 return DECL_FUNCTION_CODE (fndecl);
8480 if (! more_const_call_expr_args_p (&iter))
8481 return END_BUILTINS;
8483 arg = next_const_call_expr_arg (&iter);
8484 argtype = TREE_TYPE (arg);
8486 if (SCALAR_FLOAT_TYPE_P (parmtype))
8488 if (! SCALAR_FLOAT_TYPE_P (argtype))
8489 return END_BUILTINS;
8491 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8493 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8494 return END_BUILTINS;
8496 else if (POINTER_TYPE_P (parmtype))
8498 if (! POINTER_TYPE_P (argtype))
8499 return END_BUILTINS;
8501 else if (INTEGRAL_TYPE_P (parmtype))
8503 if (! INTEGRAL_TYPE_P (argtype))
8504 return END_BUILTINS;
8506 else
8507 return END_BUILTINS;
8510 /* Variable-length argument list. */
8511 return DECL_FUNCTION_CODE (fndecl);
8514 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8515 evaluate to a constant. */
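/* E.g. __builtin_constant_p (3) and __builtin_constant_p ("abc") fold to
   1 here; a pointer-valued argument that is not such a literal folds to
   0 right away, while a plain integer variable yields NULL_TREE so the
   decision is deferred to later passes (unless we are folding an
   initializer, where a definite answer is required now).  */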
8517 static tree
8518 fold_builtin_constant_p (tree arg)
8520 /* We return 1 for a numeric type that's known to be a constant
8521 value at compile-time or for an aggregate type that's a
8522 literal constant. */
8523 STRIP_NOPS (arg);
8525 /* If we know this is a constant, return the constant 1. */
8526 if (CONSTANT_CLASS_P (arg)
8527 || (TREE_CODE (arg) == CONSTRUCTOR
8528 && TREE_CONSTANT (arg)))
8529 return integer_one_node;
8530 if (TREE_CODE (arg) == ADDR_EXPR)
8532 tree op = TREE_OPERAND (arg, 0);
8533 if (TREE_CODE (op) == STRING_CST
8534 || (TREE_CODE (op) == ARRAY_REF
8535 && integer_zerop (TREE_OPERAND (op, 1))
8536 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8537 return integer_one_node;
8540 /* If this expression has side effects, show we don't know it to be a
8541 constant. Likewise if it's a pointer or aggregate type since in
8542 those cases we only want literals, since those are only optimized
8543 when generating RTL, not later.
8544 And finally, if we are compiling an initializer, not code, we
8545 need to return a definite result now; there's not going to be any
8546 more optimization done. */
8547 if (TREE_SIDE_EFFECTS (arg)
8548 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8549 || POINTER_TYPE_P (TREE_TYPE (arg))
8550 || cfun == 0
8551 || folding_initializer
8552 || force_folding_builtin_constant_p)
8553 return integer_zero_node;
8555 return NULL_TREE;
8558 /* Create builtin_expect or builtin_expect_with_probability
8559 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8560 The Fortran FE can also produce builtin_expect with PREDICTOR as its
8561 third argument; builtin_expect_with_probability instead takes the
8562 PROBABILITY value as its third argument. */
8564 static tree
8565 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8566 tree predictor, tree probability)
8568 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8570 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8571 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8572 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8573 ret_type = TREE_TYPE (TREE_TYPE (fn));
8574 pred_type = TREE_VALUE (arg_types);
8575 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8577 pred = fold_convert_loc (loc, pred_type, pred);
8578 expected = fold_convert_loc (loc, expected_type, expected);
8580 if (probability)
8581 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8582 else
8583 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8584 predictor);
8586 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8587 build_int_cst (ret_type, 0));
8590 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8591 NULL_TREE if no simplification is possible. */
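/* As an illustration, "__builtin_expect (a && b, 1)" is folded via the
   predicate helper above into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the prediction reaches both short-circuit arms.  */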
8593 tree
8594 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8595 tree arg3)
8597 tree inner, fndecl, inner_arg0;
8598 enum tree_code code;
8600 /* Distribute the expected value over short-circuiting operators.
8601 See through the cast from truthvalue_type_node to long. */
8602 inner_arg0 = arg0;
8603 while (CONVERT_EXPR_P (inner_arg0)
8604 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8605 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8606 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8608 /* If this is a builtin_expect within a builtin_expect keep the
8609 inner one. See through a comparison against a constant. It
8610 might have been added to create a truthvalue. */
8611 inner = inner_arg0;
8613 if (COMPARISON_CLASS_P (inner)
8614 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8615 inner = TREE_OPERAND (inner, 0);
8617 if (TREE_CODE (inner) == CALL_EXPR
8618 && (fndecl = get_callee_fndecl (inner))
8619 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8620 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8621 return arg0;
8623 inner = inner_arg0;
8624 code = TREE_CODE (inner);
8625 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8627 tree op0 = TREE_OPERAND (inner, 0);
8628 tree op1 = TREE_OPERAND (inner, 1);
8629 arg1 = save_expr (arg1);
8631 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8632 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8633 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8635 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8638 /* If the argument isn't invariant then there's nothing else we can do. */
8639 if (!TREE_CONSTANT (inner_arg0))
8640 return NULL_TREE;
8642 /* If we expect that a comparison against the argument will fold to
8643 a constant, return the constant. In practice, this means a true
8644 constant or the address of a non-weak symbol. */
8645 inner = inner_arg0;
8646 STRIP_NOPS (inner);
8647 if (TREE_CODE (inner) == ADDR_EXPR)
8649 do
8651 inner = TREE_OPERAND (inner, 0);
8653 while (TREE_CODE (inner) == COMPONENT_REF
8654 || TREE_CODE (inner) == ARRAY_REF);
8655 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8656 return NULL_TREE;
8659 /* Otherwise, ARG0 already has the proper type for the return value. */
8660 return arg0;
8663 /* Fold a call to __builtin_classify_type with argument ARG. */
8665 static tree
8666 fold_builtin_classify_type (tree arg)
8668 if (arg == 0)
8669 return build_int_cst (integer_type_node, no_type_class);
8671 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8674 /* Fold a call to __builtin_strlen with argument ARG. */
8676 static tree
8677 fold_builtin_strlen (location_t loc, tree type, tree arg)
8679 if (!validate_arg (arg, POINTER_TYPE))
8680 return NULL_TREE;
8681 else
8683 c_strlen_data lendata = { };
8684 tree len = c_strlen (arg, 0, &lendata);
8686 if (len)
8687 return fold_convert_loc (loc, type, len);
8689 if (!lendata.decl)
8690 c_strlen (arg, 1, &lendata);
8692 if (lendata.decl)
8694 if (EXPR_HAS_LOCATION (arg))
8695 loc = EXPR_LOCATION (arg);
8696 else if (loc == UNKNOWN_LOCATION)
8697 loc = input_location;
8698 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8701 return NULL_TREE;
8705 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8707 static tree
8708 fold_builtin_inf (location_t loc, tree type, int warn)
8710 REAL_VALUE_TYPE real;
8712 /* __builtin_inff is intended to be usable to define INFINITY on all
8713 targets. If an infinity is not available, INFINITY expands "to a
8714 positive constant of type float that overflows at translation
8715 time", footnote "In this case, using INFINITY will violate the
8716 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8717 Thus we pedwarn to ensure this constraint violation is
8718 diagnosed. */
8719 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8720 pedwarn (loc, 0, "target format does not support infinity");
8722 real_inf (&real);
8723 return build_real (type, real);
8726 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8727 NULL_TREE if no simplification can be made. */
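/* E.g. "sincos (x, &s, &c)" is canonicalized to roughly

     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;

   when cexpi can be constant-folded or the C99 complex functions are
   available.  */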
8729 static tree
8730 fold_builtin_sincos (location_t loc,
8731 tree arg0, tree arg1, tree arg2)
8733 tree type;
8734 tree fndecl, call = NULL_TREE;
8736 if (!validate_arg (arg0, REAL_TYPE)
8737 || !validate_arg (arg1, POINTER_TYPE)
8738 || !validate_arg (arg2, POINTER_TYPE))
8739 return NULL_TREE;
8741 type = TREE_TYPE (arg0);
8743 /* Calculate the result when the argument is a constant. */
8744 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8745 if (fn == END_BUILTINS)
8746 return NULL_TREE;
8748 /* Canonicalize sincos to cexpi. */
8749 if (TREE_CODE (arg0) == REAL_CST)
8751 tree complex_type = build_complex_type (type);
8752 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8754 if (!call)
8756 if (!targetm.libc_has_function (function_c99_math_complex)
8757 || !builtin_decl_implicit_p (fn))
8758 return NULL_TREE;
8759 fndecl = builtin_decl_explicit (fn);
8760 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8761 call = builtin_save_expr (call);
8764 tree ptype = build_pointer_type (type);
8765 arg1 = fold_convert (ptype, arg1);
8766 arg2 = fold_convert (ptype, arg2);
8767 return build2 (COMPOUND_EXPR, void_type_node,
8768 build2 (MODIFY_EXPR, void_type_node,
8769 build_fold_indirect_ref_loc (loc, arg1),
8770 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8771 build2 (MODIFY_EXPR, void_type_node,
8772 build_fold_indirect_ref_loc (loc, arg2),
8773 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8776 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8777 Return NULL_TREE if no simplification can be made. */
8779 static tree
8780 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8782 if (!validate_arg (arg1, POINTER_TYPE)
8783 || !validate_arg (arg2, POINTER_TYPE)
8784 || !validate_arg (len, INTEGER_TYPE))
8785 return NULL_TREE;
8787 /* If the LEN parameter is zero, return zero. */
8788 if (integer_zerop (len))
8789 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8790 arg1, arg2);
8792 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8793 if (operand_equal_p (arg1, arg2, 0))
8794 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8796 /* If len parameter is one, return an expression corresponding to
8797 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8798 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8800 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8801 tree cst_uchar_ptr_node
8802 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8804 tree ind1
8805 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8808 cst_uchar_ptr_node,
8809 arg1)));
8810 tree ind2
8811 = fold_convert_loc (loc, integer_type_node,
8812 build1 (INDIRECT_REF, cst_uchar_node,
8813 fold_convert_loc (loc,
8814 cst_uchar_ptr_node,
8815 arg2)));
8816 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8819 return NULL_TREE;
8822 /* Fold a call to builtin isascii with argument ARG. */
8824 static tree
8825 fold_builtin_isascii (location_t loc, tree arg)
8827 if (!validate_arg (arg, INTEGER_TYPE))
8828 return NULL_TREE;
8829 else
8831 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8832 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8833 build_int_cst (integer_type_node,
8834 ~ (unsigned HOST_WIDE_INT) 0x7f));
8835 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8836 arg, integer_zero_node);
8840 /* Fold a call to builtin toascii with argument ARG. */
8842 static tree
8843 fold_builtin_toascii (location_t loc, tree arg)
8845 if (!validate_arg (arg, INTEGER_TYPE))
8846 return NULL_TREE;
8848 /* Transform toascii(c) -> (c & 0x7f). */
8849 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8850 build_int_cst (integer_type_node, 0x7f));
8853 /* Fold a call to builtin isdigit with argument ARG. */
8855 static tree
8856 fold_builtin_isdigit (location_t loc, tree arg)
8858 if (!validate_arg (arg, INTEGER_TYPE))
8859 return NULL_TREE;
8860 else
8862 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8863 /* According to the C standard, isdigit is unaffected by locale.
8864 However, it definitely is affected by the target character set. */
8865 unsigned HOST_WIDE_INT target_digit0
8866 = lang_hooks.to_target_charset ('0');
8868 if (target_digit0 == 0)
8869 return NULL_TREE;
8871 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8872 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8873 build_int_cst (unsigned_type_node, target_digit0));
8874 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8875 build_int_cst (unsigned_type_node, 9));
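/* Editorial sketch, not part of builtins.c: the isascii, toascii and
   isdigit folds above correspond to these branch-free expressions
   (names illustrative; '0' stands for the digit zero in the target
   character set).  */
static int isascii_sketch (int c) { return (c & ~0x7f) == 0; }
static int toascii_sketch (int c) { return c & 0x7f; }
static int isdigit_sketch (int c) { return (unsigned) c - '0' <= 9; }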
8879 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8881 static tree
8882 fold_builtin_fabs (location_t loc, tree arg, tree type)
8884 if (!validate_arg (arg, REAL_TYPE))
8885 return NULL_TREE;
8887 arg = fold_convert_loc (loc, type, arg);
8888 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8891 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8893 static tree
8894 fold_builtin_abs (location_t loc, tree arg, tree type)
8896 if (!validate_arg (arg, INTEGER_TYPE))
8897 return NULL_TREE;
8899 arg = fold_convert_loc (loc, type, arg);
8900 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8903 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8905 static tree
8906 fold_builtin_carg (location_t loc, tree arg, tree type)
8908 if (validate_arg (arg, COMPLEX_TYPE)
8909 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8911 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8913 if (atan2_fn)
8915 tree new_arg = builtin_save_expr (arg);
8916 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8917 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8918 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8922 return NULL_TREE;
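/* Editorial sketch, not part of builtins.c: the fold above relies on
   the identity carg(a+bi) == atan2(b, a); note the imaginary part is
   the first atan2 argument.  Assumes <complex.h> and <math.h>; name
   illustrative.  */
static double
carg_sketch (double _Complex z)
{
  return atan2 (cimag (z), creal (z));
}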
8925 /* Fold a call to builtin frexp, we can assume the base is 2. */
8927 static tree
8928 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8930 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8931 return NULL_TREE;
8933 STRIP_NOPS (arg0);
8935 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8936 return NULL_TREE;
8938 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8940 /* Proceed if a valid pointer type was passed in. */
8941 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8943 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8944 tree frac, exp;
8946 switch (value->cl)
8948 case rvc_zero:
8949 /* For +-0, return (*exp = 0, +-0). */
8950 exp = integer_zero_node;
8951 frac = arg0;
8952 break;
8953 case rvc_nan:
8954 case rvc_inf:
8955 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8956 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8957 case rvc_normal:
8959 /* Since the frexp function always expects base 2, and in
8960 GCC normalized significands are already in the range
8961 [0.5, 1.0), we have exactly what frexp wants. */
8962 REAL_VALUE_TYPE frac_rvt = *value;
8963 SET_REAL_EXP (&frac_rvt, 0);
8964 frac = build_real (rettype, frac_rvt);
8965 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8967 break;
8968 default:
8969 gcc_unreachable ();
8972 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8973 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8974 TREE_SIDE_EFFECTS (arg1) = 1;
8975 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8978 return NULL_TREE;
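/* Editorial sketch, not part of builtins.c: the constant fold above
   uses frexp's base-2 contract, x == frac * 2^exp with frac in
   [0.5, 1) for normal values.  Assumes <math.h>; name illustrative.  */
static double
frexp_sketch (void)
{
  int e;
  double f = frexp (24.0, &e);  /* f == 0.75, e == 5; 0.75 * 32 == 24.  */
  return f;
}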
8981 /* Fold a call to builtin modf. */
8983 static tree
8984 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8986 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8987 return NULL_TREE;
8989 STRIP_NOPS (arg0);
8991 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8992 return NULL_TREE;
8994 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8996 /* Proceed if a valid pointer type was passed in. */
8997 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8999 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9000 REAL_VALUE_TYPE trunc, frac;
9002 switch (value->cl)
9004 case rvc_nan:
9005 case rvc_zero:
9006 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9007 trunc = frac = *value;
9008 break;
9009 case rvc_inf:
9010 /* For +-Inf, return (*arg1 = arg0, +-0). */
9011 frac = dconst0;
9012 frac.sign = value->sign;
9013 trunc = *value;
9014 break;
9015 case rvc_normal:
9016 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9017 real_trunc (&trunc, VOIDmode, value);
9018 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9019 /* If the original number was negative and already
9020 integral, then the fractional part is -0.0. */
9021 if (value->sign && frac.cl == rvc_zero)
9022 frac.sign = value->sign;
9023 break;
9026 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9027 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9028 build_real (rettype, trunc));
9029 TREE_SIDE_EFFECTS (arg1) = 1;
9030 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9031 build_real (rettype, frac));
9034 return NULL_TREE;
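/* Editorial sketch, not part of builtins.c: the constant fold above
   mirrors modf's contract, including a -0.0 fractional part for
   negative integral inputs.  Assumes <math.h>; name illustrative.  */
static double
modf_sketch (void)
{
  double ip;
  double fp = modf (-2.0, &ip);  /* ip == -2.0, fp == -0.0.  */
  return fp;
}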
9037 /* Given a location LOC, an interclass builtin function decl FNDECL
9038 and its single argument ARG, return a folded expression computing
9039 the same, or NULL_TREE if we either couldn't or didn't want to fold
9040 (the latter happens if there's an RTL instruction available). */
9042 static tree
9043 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9045 machine_mode mode;
9047 if (!validate_arg (arg, REAL_TYPE))
9048 return NULL_TREE;
9050 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9051 return NULL_TREE;
9053 mode = TYPE_MODE (TREE_TYPE (arg));
9055 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9057 /* If there is no optab, try generic code. */
9058 switch (DECL_FUNCTION_CODE (fndecl))
9060 tree result;
9062 CASE_FLT_FN (BUILT_IN_ISINF):
9064 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9065 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9066 tree type = TREE_TYPE (arg);
9067 REAL_VALUE_TYPE r;
9068 char buf[128];
9070 if (is_ibm_extended)
9072 /* NaN and Inf are encoded in the high-order double value
9073 only. The low-order value is not significant. */
9074 type = double_type_node;
9075 mode = DFmode;
9076 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9078 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9079 real_from_string (&r, buf);
9080 result = build_call_expr (isgr_fn, 2,
9081 fold_build1_loc (loc, ABS_EXPR, type, arg),
9082 build_real (type, r));
9083 return result;
9085 CASE_FLT_FN (BUILT_IN_FINITE):
9086 case BUILT_IN_ISFINITE:
9088 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9089 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9090 tree type = TREE_TYPE (arg);
9091 REAL_VALUE_TYPE r;
9092 char buf[128];
9094 if (is_ibm_extended)
9096 /* NaN and Inf are encoded in the high-order double value
9097 only. The low-order value is not significant. */
9098 type = double_type_node;
9099 mode = DFmode;
9100 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9102 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9103 real_from_string (&r, buf);
9104 result = build_call_expr (isle_fn, 2,
9105 fold_build1_loc (loc, ABS_EXPR, type, arg),
9106 build_real (type, r));
9107 /*result = fold_build2_loc (loc, UNGT_EXPR,
9108 TREE_TYPE (TREE_TYPE (fndecl)),
9109 fold_build1_loc (loc, ABS_EXPR, type, arg),
9110 build_real (type, r));
9111 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9112 TREE_TYPE (TREE_TYPE (fndecl)),
9113 result);*/
9114 return result;
9116 case BUILT_IN_ISNORMAL:
9118 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9119 islessequal(fabs(x),DBL_MAX). */
9120 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9121 tree type = TREE_TYPE (arg);
9122 tree orig_arg, max_exp, min_exp;
9123 machine_mode orig_mode = mode;
9124 REAL_VALUE_TYPE rmax, rmin;
9125 char buf[128];
9127 orig_arg = arg = builtin_save_expr (arg);
9128 if (is_ibm_extended)
9130 /* Use double to test the normal range of IBM extended
9131 precision. Emin for IBM extended precision is
9132 different to emin for IEEE double, being 53 higher
9133 since the low double exponent is at least 53 lower
9134 than the high double exponent. */
9135 type = double_type_node;
9136 mode = DFmode;
9137 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9139 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9141 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9142 real_from_string (&rmax, buf);
9143 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9144 real_from_string (&rmin, buf);
9145 max_exp = build_real (type, rmax);
9146 min_exp = build_real (type, rmin);
9148 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9149 if (is_ibm_extended)
9151 /* Testing the high end of the range is done just using
9152 the high double, using the same test as isfinite().
9153 For the subnormal end of the range we first test the
9154 high double, then if its magnitude is equal to the
9155 limit of 0x1p-969, we test whether the low double is
9156 non-zero and opposite sign to the high double. */
9157 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9158 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9159 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9160 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9161 arg, min_exp);
9162 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9163 complex_double_type_node, orig_arg);
9164 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9165 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9166 tree zero = build_real (type, dconst0);
9167 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9168 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9169 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9170 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9171 fold_build3 (COND_EXPR,
9172 integer_type_node,
9173 hilt, logt, lolt));
9174 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9175 eq_min, ok_lo);
9176 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9177 gt_min, eq_min);
9179 else
9181 tree const isge_fn
9182 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9183 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9185 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9186 max_exp, min_exp);
9187 return result;
9189 default:
9190 break;
9193 return NULL_TREE;
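/* Editorial sketch, not part of builtins.c: for a plain (non-composite)
   double the generic expansions above amount to the comparisons below,
   with DBL_MAX the largest finite value and DBL_MIN the smallest normal.
   Assumes <math.h> and <float.h>; names illustrative.  */
static int isinf_sketch (double x) { return isgreater (fabs (x), DBL_MAX); }
static int isfinite_sketch (double x) { return islessequal (fabs (x), DBL_MAX); }
static int isnormal_sketch (double x)
{
  /* "&" mirrors the BIT_AND_EXPR built above; both operands are 0/1.  */
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}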
9196 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9197 ARG is the argument for the call. */
9199 static tree
9200 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9202 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9204 if (!validate_arg (arg, REAL_TYPE))
9205 return NULL_TREE;
9207 switch (builtin_index)
9209 case BUILT_IN_ISINF:
9210 if (!HONOR_INFINITIES (arg))
9211 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9213 return NULL_TREE;
9215 case BUILT_IN_ISINF_SIGN:
9217 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9218 /* In a boolean context, GCC will fold the inner COND_EXPR to
9219 1. So e.g. "if (isinf_sign(x))" would be folded to just
9220 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9221 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9222 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9223 tree tmp = NULL_TREE;
9225 arg = builtin_save_expr (arg);
9227 if (signbit_fn && isinf_fn)
9229 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9230 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9232 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9233 signbit_call, integer_zero_node);
9234 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9235 isinf_call, integer_zero_node);
9237 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9238 integer_minus_one_node, integer_one_node);
9239 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9240 isinf_call, tmp,
9241 integer_zero_node);
9244 return tmp;
9247 case BUILT_IN_ISFINITE:
9248 if (!HONOR_NANS (arg)
9249 && !HONOR_INFINITIES (arg))
9250 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9252 return NULL_TREE;
9254 case BUILT_IN_ISNAN:
9255 if (!HONOR_NANS (arg))
9256 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9259 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9260 if (is_ibm_extended)
9262 /* NaN and Inf are encoded in the high-order double value
9263 only. The low-order value is not significant. */
9264 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9267 arg = builtin_save_expr (arg);
9268 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9270 default:
9271 gcc_unreachable ();
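/* Editorial sketch, not part of builtins.c: the UNORDERED_EXPR built
   for isnan above is the usual self-comparison test, and isinf_sign
   composes isinf and signbit exactly as in the fold.  Assumes
   <math.h>; names illustrative.  */
static int isnan_sketch (double x) { return x != x; }
static int isinf_sign_sketch (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}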
9275 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9276 This builtin will generate code to return the appropriate floating
9277 point classification depending on the value of the floating point
9278 number passed in. The possible return values must be supplied as
9279 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9280 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9281 one floating point argument which is "type generic". */
9283 static tree
9284 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9286 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9287 arg, type, res, tmp;
9288 machine_mode mode;
9289 REAL_VALUE_TYPE r;
9290 char buf[128];
9292 /* Verify the required arguments in the original call. */
9293 if (nargs != 6
9294 || !validate_arg (args[0], INTEGER_TYPE)
9295 || !validate_arg (args[1], INTEGER_TYPE)
9296 || !validate_arg (args[2], INTEGER_TYPE)
9297 || !validate_arg (args[3], INTEGER_TYPE)
9298 || !validate_arg (args[4], INTEGER_TYPE)
9299 || !validate_arg (args[5], REAL_TYPE))
9300 return NULL_TREE;
9302 fp_nan = args[0];
9303 fp_infinite = args[1];
9304 fp_normal = args[2];
9305 fp_subnormal = args[3];
9306 fp_zero = args[4];
9307 arg = args[5];
9308 type = TREE_TYPE (arg);
9309 mode = TYPE_MODE (type);
9310 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9312 /* fpclassify(x) ->
9313 isnan(x) ? FP_NAN :
9314 (fabs(x) == Inf ? FP_INFINITE :
9315 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9316 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9318 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9319 build_real (type, dconst0));
9320 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9321 tmp, fp_zero, fp_subnormal);
9323 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9324 real_from_string (&r, buf);
9325 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9326 arg, build_real (type, r));
9327 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9329 if (HONOR_INFINITIES (mode))
9331 real_inf (&r);
9332 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9333 build_real (type, r));
9334 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9335 fp_infinite, res);
9338 if (HONOR_NANS (mode))
9340 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9341 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9344 return res;
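/* Editorial sketch, not part of builtins.c: for a double argument the
   COND_EXPR chain built above evaluates as the expression below.
   Assumes <math.h> and <float.h>; name illustrative.  */
static int
fpclassify_sketch (double x, int fp_nan, int fp_infinite, int fp_normal,
                   int fp_subnormal, int fp_zero)
{
  double ax = fabs (x);
  return x != x ? fp_nan
         : ax == INFINITY ? fp_infinite
         : ax >= DBL_MIN ? fp_normal
         : ax == 0.0 ? fp_zero
         : fp_subnormal;
}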
9347 /* Fold a call to an unordered comparison function such as
9348 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9349 being called and ARG0 and ARG1 are the arguments for the call.
9350 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9351 the opposite of the desired result. UNORDERED_CODE is used
9352 for modes that can hold NaNs and ORDERED_CODE is used for
9353 the rest. */
9355 static tree
9356 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9357 enum tree_code unordered_code,
9358 enum tree_code ordered_code)
9360 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9361 enum tree_code code;
9362 tree type0, type1;
9363 enum tree_code code0, code1;
9364 tree cmp_type = NULL_TREE;
9366 type0 = TREE_TYPE (arg0);
9367 type1 = TREE_TYPE (arg1);
9369 code0 = TREE_CODE (type0);
9370 code1 = TREE_CODE (type1);
9372 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9373 /* Choose the wider of two real types. */
9374 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9375 ? type0 : type1;
9376 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9377 cmp_type = type0;
9378 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9379 cmp_type = type1;
9381 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9382 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9384 if (unordered_code == UNORDERED_EXPR)
9386 if (!HONOR_NANS (arg0))
9387 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9388 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9391 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9392 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9393 fold_build2_loc (loc, code, type, arg0, arg1));
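/* Editorial sketch, not part of builtins.c: the value computed by the
   fold above for isgreater is equivalent to the C below; the folded
   form additionally avoids raising FE_INVALID on quiet NaNs, which a
   raw ">" on NaN operands would.  Assumes <math.h>; name
   illustrative.  */
static int
isgreater_sketch (double x, double y)
{
  return !isnan (x) && !isnan (y) && x > y;
}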
9396 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9397 arithmetic if it can never overflow, or into internal functions that
9398 return both the result of the arithmetic and an overflowed boolean flag
9399 in a complex integer result, or into some other check for overflow.
9400 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9401 checking part of that. */
9403 static tree
9404 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9405 tree arg0, tree arg1, tree arg2)
9407 enum internal_fn ifn = IFN_LAST;
9408 /* The code of the expression corresponding to the built-in. */
9409 enum tree_code opcode = ERROR_MARK;
9410 bool ovf_only = false;
9412 switch (fcode)
9414 case BUILT_IN_ADD_OVERFLOW_P:
9415 ovf_only = true;
9416 /* FALLTHRU */
9417 case BUILT_IN_ADD_OVERFLOW:
9418 case BUILT_IN_SADD_OVERFLOW:
9419 case BUILT_IN_SADDL_OVERFLOW:
9420 case BUILT_IN_SADDLL_OVERFLOW:
9421 case BUILT_IN_UADD_OVERFLOW:
9422 case BUILT_IN_UADDL_OVERFLOW:
9423 case BUILT_IN_UADDLL_OVERFLOW:
9424 opcode = PLUS_EXPR;
9425 ifn = IFN_ADD_OVERFLOW;
9426 break;
9427 case BUILT_IN_SUB_OVERFLOW_P:
9428 ovf_only = true;
9429 /* FALLTHRU */
9430 case BUILT_IN_SUB_OVERFLOW:
9431 case BUILT_IN_SSUB_OVERFLOW:
9432 case BUILT_IN_SSUBL_OVERFLOW:
9433 case BUILT_IN_SSUBLL_OVERFLOW:
9434 case BUILT_IN_USUB_OVERFLOW:
9435 case BUILT_IN_USUBL_OVERFLOW:
9436 case BUILT_IN_USUBLL_OVERFLOW:
9437 opcode = MINUS_EXPR;
9438 ifn = IFN_SUB_OVERFLOW;
9439 break;
9440 case BUILT_IN_MUL_OVERFLOW_P:
9441 ovf_only = true;
9442 /* FALLTHRU */
9443 case BUILT_IN_MUL_OVERFLOW:
9444 case BUILT_IN_SMUL_OVERFLOW:
9445 case BUILT_IN_SMULL_OVERFLOW:
9446 case BUILT_IN_SMULLL_OVERFLOW:
9447 case BUILT_IN_UMUL_OVERFLOW:
9448 case BUILT_IN_UMULL_OVERFLOW:
9449 case BUILT_IN_UMULLL_OVERFLOW:
9450 opcode = MULT_EXPR;
9451 ifn = IFN_MUL_OVERFLOW;
9452 break;
9453 default:
9454 gcc_unreachable ();
9457 /* For the "generic" overloads, the first two arguments can have different
9458 types and the last argument determines the target type to use to check
9459 for overflow. The arguments of the other overloads all have the same
9460 type. */
9461 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9463 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9464 arguments are constant, attempt to fold the built-in call into a constant
9465 expression indicating whether or not it detected an overflow. */
9466 if (ovf_only
9467 && TREE_CODE (arg0) == INTEGER_CST
9468 && TREE_CODE (arg1) == INTEGER_CST)
9469 /* Perform the computation in the target type and check for overflow. */
9470 return omit_one_operand_loc (loc, boolean_type_node,
9471 arith_overflowed_p (opcode, type, arg0, arg1)
9472 ? boolean_true_node : boolean_false_node,
9473 arg2);
9475 tree intres, ovfres;
9476 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9478 intres = fold_binary_loc (loc, opcode, type,
9479 fold_convert_loc (loc, type, arg0),
9480 fold_convert_loc (loc, type, arg1));
9481 if (TREE_OVERFLOW (intres))
9482 intres = drop_tree_overflow (intres);
9483 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9484 ? boolean_true_node : boolean_false_node);
9486 else
9488 tree ctype = build_complex_type (type);
9489 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9490 arg0, arg1);
9491 tree tgt = save_expr (call);
9492 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9493 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9494 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9497 if (ovf_only)
9498 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9500 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9501 tree store
9502 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9503 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
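/* Editorial sketch, not part of builtins.c: at the source level the
   builtins folded above behave as below; with constant operands the
   whole call folds to constants.  Name illustrative.  */
static int
add_overflow_sketch (int a, int b, int *sum)
{
  /* Nonzero iff a + b overflows int; *sum gets the wrapped result
     either way.  */
  return __builtin_add_overflow (a, b, sum);
}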
9506 /* Fold a call to __builtin_FILE to a constant string. */
9508 static inline tree
9509 fold_builtin_FILE (location_t loc)
9511 if (const char *fname = LOCATION_FILE (loc))
9513 /* The documentation says this builtin is equivalent to the preprocessor
9514 __FILE__ macro so it appears appropriate to use the same file prefix
9515 mappings. */
9516 fname = remap_macro_filename (fname);
9517 return build_string_literal (strlen (fname) + 1, fname);
9520 return build_string_literal (1, "");
9523 /* Fold a call to __builtin_FUNCTION to a constant string. */
9525 static inline tree
9526 fold_builtin_FUNCTION ()
9528 const char *name = "";
9530 if (current_function_decl)
9531 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9533 return build_string_literal (strlen (name) + 1, name);
9536 /* Fold a call to __builtin_LINE to an integer constant. */
9538 static inline tree
9539 fold_builtin_LINE (location_t loc, tree type)
9541 return build_int_cst (type, LOCATION_LINE (loc));
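/* Editorial sketch, not part of builtins.c: all three builtins fold to
   constants describing the call site, e.g. the call below folds to
   string and integer literals for this location.  Name illustrative.  */
static void
log_here_sketch (void)
{
  __builtin_printf ("%s:%d in %s\n", __builtin_FILE (),
                    __builtin_LINE (), __builtin_FUNCTION ());
}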
9544 /* Fold a call to built-in function FNDECL with 0 arguments.
9545 This function returns NULL_TREE if no simplification was possible. */
9547 static tree
9548 fold_builtin_0 (location_t loc, tree fndecl)
9550 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9551 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9552 switch (fcode)
9554 case BUILT_IN_FILE:
9555 return fold_builtin_FILE (loc);
9557 case BUILT_IN_FUNCTION:
9558 return fold_builtin_FUNCTION ();
9560 case BUILT_IN_LINE:
9561 return fold_builtin_LINE (loc, type);
9563 CASE_FLT_FN (BUILT_IN_INF):
9564 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9565 case BUILT_IN_INFD32:
9566 case BUILT_IN_INFD64:
9567 case BUILT_IN_INFD128:
9568 return fold_builtin_inf (loc, type, true);
9570 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9571 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9572 return fold_builtin_inf (loc, type, false);
9574 case BUILT_IN_CLASSIFY_TYPE:
9575 return fold_builtin_classify_type (NULL_TREE);
9577 default:
9578 break;
9580 return NULL_TREE;
9583 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9584 This function returns NULL_TREE if no simplification was possible. */
9586 static tree
9587 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9589 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9590 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9592 if (TREE_CODE (arg0) == ERROR_MARK)
9593 return NULL_TREE;
9595 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9596 return ret;
9598 switch (fcode)
9600 case BUILT_IN_CONSTANT_P:
9602 tree val = fold_builtin_constant_p (arg0);
9604 /* Gimplification will pull the CALL_EXPR for the builtin out of
9605 an if condition. When not optimizing, we'll not CSE it back.
9606 To avoid regressions such as link errors, return false now. */
9607 if (!val && !optimize)
9608 val = integer_zero_node;
9610 return val;
9613 case BUILT_IN_CLASSIFY_TYPE:
9614 return fold_builtin_classify_type (arg0);
9616 case BUILT_IN_STRLEN:
9617 return fold_builtin_strlen (loc, type, arg0);
9619 CASE_FLT_FN (BUILT_IN_FABS):
9620 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9621 case BUILT_IN_FABSD32:
9622 case BUILT_IN_FABSD64:
9623 case BUILT_IN_FABSD128:
9624 return fold_builtin_fabs (loc, arg0, type);
9626 case BUILT_IN_ABS:
9627 case BUILT_IN_LABS:
9628 case BUILT_IN_LLABS:
9629 case BUILT_IN_IMAXABS:
9630 return fold_builtin_abs (loc, arg0, type);
9632 CASE_FLT_FN (BUILT_IN_CONJ):
9633 if (validate_arg (arg0, COMPLEX_TYPE)
9634 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9635 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9636 break;
9638 CASE_FLT_FN (BUILT_IN_CREAL):
9639 if (validate_arg (arg0, COMPLEX_TYPE)
9640 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9641 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9642 break;
9644 CASE_FLT_FN (BUILT_IN_CIMAG):
9645 if (validate_arg (arg0, COMPLEX_TYPE)
9646 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9647 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9648 break;
9650 CASE_FLT_FN (BUILT_IN_CARG):
9651 return fold_builtin_carg (loc, arg0, type);
9653 case BUILT_IN_ISASCII:
9654 return fold_builtin_isascii (loc, arg0);
9656 case BUILT_IN_TOASCII:
9657 return fold_builtin_toascii (loc, arg0);
9659 case BUILT_IN_ISDIGIT:
9660 return fold_builtin_isdigit (loc, arg0);
9662 CASE_FLT_FN (BUILT_IN_FINITE):
9663 case BUILT_IN_FINITED32:
9664 case BUILT_IN_FINITED64:
9665 case BUILT_IN_FINITED128:
9666 case BUILT_IN_ISFINITE:
9668 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9669 if (ret)
9670 return ret;
9671 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9674 CASE_FLT_FN (BUILT_IN_ISINF):
9675 case BUILT_IN_ISINFD32:
9676 case BUILT_IN_ISINFD64:
9677 case BUILT_IN_ISINFD128:
9679 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9680 if (ret)
9681 return ret;
9682 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9685 case BUILT_IN_ISNORMAL:
9686 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9688 case BUILT_IN_ISINF_SIGN:
9689 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9691 CASE_FLT_FN (BUILT_IN_ISNAN):
9692 case BUILT_IN_ISNAND32:
9693 case BUILT_IN_ISNAND64:
9694 case BUILT_IN_ISNAND128:
9695 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9697 case BUILT_IN_FREE:
9698 if (integer_zerop (arg0))
9699 return build_empty_stmt (loc);
9700 break;
9702 default:
9703 break;
9706 return NULL_TREE;
9710 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9711 This function returns NULL_TREE if no simplification was possible. */
9713 static tree
9714 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9716 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9717 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9719 if (TREE_CODE (arg0) == ERROR_MARK
9720 || TREE_CODE (arg1) == ERROR_MARK)
9721 return NULL_TREE;
9723 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9724 return ret;
9726 switch (fcode)
9728 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9729 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9730 if (validate_arg (arg0, REAL_TYPE)
9731 && validate_arg (arg1, POINTER_TYPE))
9732 return do_mpfr_lgamma_r (arg0, arg1, type);
9733 break;
9735 CASE_FLT_FN (BUILT_IN_FREXP):
9736 return fold_builtin_frexp (loc, arg0, arg1, type);
9738 CASE_FLT_FN (BUILT_IN_MODF):
9739 return fold_builtin_modf (loc, arg0, arg1, type);
9741 case BUILT_IN_STRSPN:
9742 return fold_builtin_strspn (loc, arg0, arg1);
9744 case BUILT_IN_STRCSPN:
9745 return fold_builtin_strcspn (loc, arg0, arg1);
9747 case BUILT_IN_STRPBRK:
9748 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9750 case BUILT_IN_EXPECT:
9751 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9753 case BUILT_IN_ISGREATER:
9754 return fold_builtin_unordered_cmp (loc, fndecl,
9755 arg0, arg1, UNLE_EXPR, LE_EXPR);
9756 case BUILT_IN_ISGREATEREQUAL:
9757 return fold_builtin_unordered_cmp (loc, fndecl,
9758 arg0, arg1, UNLT_EXPR, LT_EXPR);
9759 case BUILT_IN_ISLESS:
9760 return fold_builtin_unordered_cmp (loc, fndecl,
9761 arg0, arg1, UNGE_EXPR, GE_EXPR);
9762 case BUILT_IN_ISLESSEQUAL:
9763 return fold_builtin_unordered_cmp (loc, fndecl,
9764 arg0, arg1, UNGT_EXPR, GT_EXPR);
9765 case BUILT_IN_ISLESSGREATER:
9766 return fold_builtin_unordered_cmp (loc, fndecl,
9767 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9768 case BUILT_IN_ISUNORDERED:
9769 return fold_builtin_unordered_cmp (loc, fndecl,
9770 arg0, arg1, UNORDERED_EXPR,
9771 NOP_EXPR);
9773 /* We do the folding for va_start in the expander. */
9774 case BUILT_IN_VA_START:
9775 break;
9777 case BUILT_IN_OBJECT_SIZE:
9778 return fold_builtin_object_size (arg0, arg1);
9780 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9781 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9783 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9784 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9786 default:
9787 break;
9789 return NULL_TREE;
9792 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9793 and ARG2.
9794 This function returns NULL_TREE if no simplification was possible. */
9796 static tree
9797 fold_builtin_3 (location_t loc, tree fndecl,
9798 tree arg0, tree arg1, tree arg2)
9800 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9801 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9803 if (TREE_CODE (arg0) == ERROR_MARK
9804 || TREE_CODE (arg1) == ERROR_MARK
9805 || TREE_CODE (arg2) == ERROR_MARK)
9806 return NULL_TREE;
9808 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9809 arg0, arg1, arg2))
9810 return ret;
9812 switch (fcode)
9815 CASE_FLT_FN (BUILT_IN_SINCOS):
9816 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9818 CASE_FLT_FN (BUILT_IN_REMQUO):
9819 if (validate_arg (arg0, REAL_TYPE)
9820 && validate_arg (arg1, REAL_TYPE)
9821 && validate_arg (arg2, POINTER_TYPE))
9822 return do_mpfr_remquo (arg0, arg1, arg2);
9823 break;
9825 case BUILT_IN_MEMCMP:
9826 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9828 case BUILT_IN_EXPECT:
9829 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9831 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9832 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9834 case BUILT_IN_ADD_OVERFLOW:
9835 case BUILT_IN_SUB_OVERFLOW:
9836 case BUILT_IN_MUL_OVERFLOW:
9837 case BUILT_IN_ADD_OVERFLOW_P:
9838 case BUILT_IN_SUB_OVERFLOW_P:
9839 case BUILT_IN_MUL_OVERFLOW_P:
9840 case BUILT_IN_SADD_OVERFLOW:
9841 case BUILT_IN_SADDL_OVERFLOW:
9842 case BUILT_IN_SADDLL_OVERFLOW:
9843 case BUILT_IN_SSUB_OVERFLOW:
9844 case BUILT_IN_SSUBL_OVERFLOW:
9845 case BUILT_IN_SSUBLL_OVERFLOW:
9846 case BUILT_IN_SMUL_OVERFLOW:
9847 case BUILT_IN_SMULL_OVERFLOW:
9848 case BUILT_IN_SMULLL_OVERFLOW:
9849 case BUILT_IN_UADD_OVERFLOW:
9850 case BUILT_IN_UADDL_OVERFLOW:
9851 case BUILT_IN_UADDLL_OVERFLOW:
9852 case BUILT_IN_USUB_OVERFLOW:
9853 case BUILT_IN_USUBL_OVERFLOW:
9854 case BUILT_IN_USUBLL_OVERFLOW:
9855 case BUILT_IN_UMUL_OVERFLOW:
9856 case BUILT_IN_UMULL_OVERFLOW:
9857 case BUILT_IN_UMULLL_OVERFLOW:
9858 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9860 default:
9861 break;
9863 return NULL_TREE;
9866 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9867 arguments. IGNORE is true if the result of the
9868 function call is ignored. This function returns NULL_TREE if no
9869 simplification was possible. */
9871 tree
9872 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9874 tree ret = NULL_TREE;
9876 switch (nargs)
9878 case 0:
9879 ret = fold_builtin_0 (loc, fndecl);
9880 break;
9881 case 1:
9882 ret = fold_builtin_1 (loc, fndecl, args[0]);
9883 break;
9884 case 2:
9885 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9886 break;
9887 case 3:
9888 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9889 break;
9890 default:
9891 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9892 break;
9894 if (ret)
9896 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9897 SET_EXPR_LOCATION (ret, loc);
9898 return ret;
9900 return NULL_TREE;
9903 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9904 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9905 of arguments in ARGS to be omitted. OLDNARGS is the number of
9906 elements in ARGS. */
9908 static tree
9909 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9910 int skip, tree fndecl, int n, va_list newargs)
9912 int nargs = oldnargs - skip + n;
9913 tree *buffer;
9915 if (n > 0)
9917 int i, j;
9919 buffer = XALLOCAVEC (tree, nargs);
9920 for (i = 0; i < n; i++)
9921 buffer[i] = va_arg (newargs, tree);
9922 for (j = skip; j < oldnargs; j++, i++)
9923 buffer[i] = args[j];
9925 else
9926 buffer = args + skip;
9928 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9931 /* Return true if FNDECL shouldn't be folded right now.
9932 If a built-in function has an inline attribute always_inline
9933 wrapper, defer folding it after always_inline functions have
9934 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9935 might not be performed. */
9937 bool
9938 avoid_folding_inline_builtin (tree fndecl)
9940 return (DECL_DECLARED_INLINE_P (fndecl)
9941 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9942 && cfun
9943 && !cfun->always_inline_functions_inlined
9944 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9947 /* A wrapper function for builtin folding that prevents warnings for
9948 "statement without effect" and the like, caused by removing the
9949 call node earlier than the warning is generated. */
9951 tree
9952 fold_call_expr (location_t loc, tree exp, bool ignore)
9954 tree ret = NULL_TREE;
9955 tree fndecl = get_callee_fndecl (exp);
9956 if (fndecl && fndecl_built_in_p (fndecl)
9957 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9958 yet. Defer folding until we see all the arguments
9959 (after inlining). */
9960 && !CALL_EXPR_VA_ARG_PACK (exp))
9962 int nargs = call_expr_nargs (exp);
9964 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9965 instead the last argument is __builtin_va_arg_pack (). Defer folding
9966 even in that case, until arguments are finalized. */
9967 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9969 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9970 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9971 return NULL_TREE;
9974 if (avoid_folding_inline_builtin (fndecl))
9975 return NULL_TREE;
9977 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9978 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9979 CALL_EXPR_ARGP (exp), ignore);
9980 else
9982 tree *args = CALL_EXPR_ARGP (exp);
9983 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9984 if (ret)
9985 return ret;
9988 return NULL_TREE;
9991 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9992 N arguments are passed in the array ARGARRAY. Return a folded
9993 expression or NULL_TREE if no simplification was possible. */
9995 tree
9996 fold_builtin_call_array (location_t loc, tree,
9997 tree fn,
9998 int n,
9999 tree *argarray)
10001 if (TREE_CODE (fn) != ADDR_EXPR)
10002 return NULL_TREE;
10004 tree fndecl = TREE_OPERAND (fn, 0);
10005 if (TREE_CODE (fndecl) == FUNCTION_DECL
10006 && fndecl_built_in_p (fndecl))
10008 /* If last argument is __builtin_va_arg_pack (), arguments to this
10009 function are not finalized yet. Defer folding until they are. */
10010 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10012 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10013 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10014 return NULL_TREE;
10016 if (avoid_folding_inline_builtin (fndecl))
10017 return NULL_TREE;
10018 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10019 return targetm.fold_builtin (fndecl, n, argarray, false);
10020 else
10021 return fold_builtin_n (loc, fndecl, argarray, n, false);
10024 return NULL_TREE;
10027 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10028 along with N new arguments specified as the "..." parameters. SKIP
10029 is the number of arguments in EXP to be omitted. This function is used
10030 to do varargs-to-varargs transformations. */
10032 static tree
10033 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10035 va_list ap;
10036 tree t;
10038 va_start (ap, n);
10039 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10040 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10041 va_end (ap);
10043 return t;
10046 /* Validate a single argument ARG against a tree code CODE representing
10047 a type. Return true when argument is valid. */
10049 static bool
10050 validate_arg (const_tree arg, enum tree_code code)
10052 if (!arg)
10053 return false;
10054 else if (code == POINTER_TYPE)
10055 return POINTER_TYPE_P (TREE_TYPE (arg));
10056 else if (code == INTEGER_TYPE)
10057 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10058 return code == TREE_CODE (TREE_TYPE (arg));
10061 /* This function validates the types of a function call argument list
10062 against a specified list of tree_codes. If the last specifier is a 0,
10063 that represents an ellipsis; otherwise the last specifier must be a
10064 VOID_TYPE.
10066 This is the GIMPLE version of validate_arglist. Eventually we want to
10067 completely convert builtins.c to work from GIMPLEs and the tree based
10068 validate_arglist will then be removed. */
10070 bool
10071 validate_gimple_arglist (const gcall *call, ...)
10073 enum tree_code code;
10074 bool res = 0;
10075 va_list ap;
10076 const_tree arg;
10077 size_t i;
10079 va_start (ap, call);
10080 i = 0;
10084 code = (enum tree_code) va_arg (ap, int);
10085 switch (code)
10087 case 0:
10088 /* This signifies an ellipsis; any further arguments are all ok. */
10089 res = true;
10090 goto end;
10091 case VOID_TYPE:
10092 /* This signifies an endlink, if no arguments remain, return
10093 true, otherwise return false. */
10094 res = (i == gimple_call_num_args (call));
10095 goto end;
10096 default:
10097 /* If no parameters remain or the parameter's code does not
10098 match the specified code, return false. Otherwise continue
10099 checking any remaining arguments. */
10100 arg = gimple_call_arg (call, i++);
10101 if (!validate_arg (arg, code))
10102 goto end;
10103 break;
10106 while (1);
10108 /* We need gotos here since we can only have one VA_CLOSE in a
10109 function. */
10110 end: ;
10111 va_end (ap);
10113 return res;
10116 /* Default target-specific builtin expander that does nothing. */
10118 rtx
10119 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10120 rtx target ATTRIBUTE_UNUSED,
10121 rtx subtarget ATTRIBUTE_UNUSED,
10122 machine_mode mode ATTRIBUTE_UNUSED,
10123 int ignore ATTRIBUTE_UNUSED)
10125 return NULL_RTX;
10128 /* Returns true if EXP represents data that would potentially reside
10129 in a readonly section. */
10131 bool
10132 readonly_data_expr (tree exp)
10134 STRIP_NOPS (exp);
10136 if (TREE_CODE (exp) != ADDR_EXPR)
10137 return false;
10139 exp = get_base_address (TREE_OPERAND (exp, 0));
10140 if (!exp)
10141 return false;
10143 /* Make sure we call decl_readonly_section only for trees it
10144 can handle (since it returns true for everything it doesn't
10145 understand). */
10146 if (TREE_CODE (exp) == STRING_CST
10147 || TREE_CODE (exp) == CONSTRUCTOR
10148 || (VAR_P (exp) && TREE_STATIC (exp)))
10149 return decl_readonly_section (exp, 0);
10150 else
10151 return false;
10154 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10155 to the call, and TYPE is its return type.
10157 Return NULL_TREE if no simplification was possible, otherwise return the
10158 simplified form of the call as a tree.
10160 The simplified form may be a constant or other expression which
10161 computes the same value, but in a more efficient manner (including
10162 calls to other builtin functions).
10164 The call may contain arguments which need to be evaluated, but
10165 which are not useful to determine the result of the call. In
10166 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10167 COMPOUND_EXPR will be an argument which must be evaluated.
10168 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10169 COMPOUND_EXPR in the chain will contain the tree for the simplified
10170 form of the builtin function call. */
10172 static tree
10173 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10175 if (!validate_arg (s1, POINTER_TYPE)
10176 || !validate_arg (s2, POINTER_TYPE))
10177 return NULL_TREE;
10178 else
10180 tree fn;
10181 const char *p1, *p2;
10183 p2 = c_getstr (s2);
10184 if (p2 == NULL)
10185 return NULL_TREE;
10187 p1 = c_getstr (s1);
10188 if (p1 != NULL)
10190 const char *r = strpbrk (p1, p2);
10191 tree tem;
10193 if (r == NULL)
10194 return build_int_cst (TREE_TYPE (s1), 0);
10196 /* Return an offset into the constant string argument. */
10197 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10198 return fold_convert_loc (loc, type, tem);
10201 if (p2[0] == '\0')
10202 /* strpbrk(x, "") == NULL.
10203 Evaluate and ignore s1 in case it had side-effects. */
10204 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10206 if (p2[1] != '\0')
10207 return NULL_TREE; /* Really call strpbrk. */
10209 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10210 if (!fn)
10211 return NULL_TREE;
10213 /* New argument list transforming strpbrk(s1, s2) to
10214 strchr(s1, s2[0]). */
10215 return build_call_expr_loc (loc, fn, 2, s1,
10216 build_int_cst (integer_type_node, p2[0]));
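/* Editorial sketch, not part of builtins.c: with a one-character
   accept set the fold above turns strpbrk into strchr, so the two
   functions below compute the same thing.  Assumes <string.h>; names
   illustrative.  */
static char *strpbrk_slash_sketch (char *s) { return strpbrk (s, "/"); }
static char *strchr_slash_sketch (char *s) { return strchr (s, '/'); }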
10220 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10221 to the call.
10223 Return NULL_TREE if no simplification was possible, otherwise return the
10224 simplified form of the call as a tree.
10226 The simplified form may be a constant or other expression which
10227 computes the same value, but in a more efficient manner (including
10228 calls to other builtin functions).
10230 The call may contain arguments which need to be evaluated, but
10231 which are not useful to determine the result of the call. In
10232 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10233 COMPOUND_EXPR will be an argument which must be evaluated.
10234 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10235 COMPOUND_EXPR in the chain will contain the tree for the simplified
10236 form of the builtin function call. */
10238 static tree
10239 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10241 if (!validate_arg (s1, POINTER_TYPE)
10242 || !validate_arg (s2, POINTER_TYPE))
10243 return NULL_TREE;
10244 else
10246 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10248 /* If either argument is "", return NULL_TREE. */
10249 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10250 /* Evaluate and ignore both arguments in case either one has
10251 side-effects. */
10252 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10253 s1, s2);
10254 return NULL_TREE;
10258 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10259 to the call.
10261 Return NULL_TREE if no simplification was possible, otherwise return the
10262 simplified form of the call as a tree.
10264 The simplified form may be a constant or other expression which
10265 computes the same value, but in a more efficient manner (including
10266 calls to other builtin functions).
10268 The call may contain arguments which need to be evaluated, but
10269 which are not useful to determine the result of the call. In
10270 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10271 COMPOUND_EXPR will be an argument which must be evaluated.
10272 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10273 COMPOUND_EXPR in the chain will contain the tree for the simplified
10274 form of the builtin function call. */
10276 static tree
10277 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10279 if (!validate_arg (s1, POINTER_TYPE)
10280 || !validate_arg (s2, POINTER_TYPE))
10281 return NULL_TREE;
10282 else
10284 /* If the first argument is "", return NULL_TREE. */
10285 const char *p1 = c_getstr (s1);
10286 if (p1 && *p1 == '\0')
10288 /* Evaluate and ignore argument s2 in case it has
10289 side-effects. */
10290 return omit_one_operand_loc (loc, size_type_node,
10291 size_zero_node, s2);
10294 /* If the second argument is "", return __builtin_strlen(s1). */
10295 const char *p2 = c_getstr (s2);
10296 if (p2 && *p2 == '\0')
10298 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10300 /* If the replacement _DECL isn't initialized, don't do the
10301 transformation. */
10302 if (!fn)
10303 return NULL_TREE;
10305 return build_call_expr_loc (loc, fn, 1, s1);
10307 return NULL_TREE;
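/* Editorial sketch, not part of builtins.c: the two folds above in
   source form; strcspn ("", s2) is 0 (s2 still evaluated for side
   effects) and strcspn (s1, "") is strlen (s1).  Assumes <string.h>;
   name illustrative.  */
static size_t
strcspn_empty_reject_sketch (const char *s1)
{
  return strlen (s1);  /* What strcspn (s1, "") folds to.  */
}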
10311 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10312 produced. False otherwise. This is done so that we don't output the error
10313 or warning twice or three times. */
10315 bool
10316 fold_builtin_next_arg (tree exp, bool va_start_p)
10318 tree fntype = TREE_TYPE (current_function_decl);
10319 int nargs = call_expr_nargs (exp);
10320 tree arg;
10321 /* There is a good chance the current input_location points inside the
10322 definition of the va_start macro (perhaps on the token for the
10323 builtin) in a system header, so warnings will not be emitted.
10324 Use the location in real source code. */
10325 location_t current_location =
10326 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10327 NULL);
10329 if (!stdarg_p (fntype))
10331 error ("%<va_start%> used in function with fixed arguments");
10332 return true;
10335 if (va_start_p)
10337 if (va_start_p && (nargs != 2))
10339 error ("wrong number of arguments to function %<va_start%>");
10340 return true;
10342 arg = CALL_EXPR_ARG (exp, 1);
10344 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10345 after we have checked the arguments and, if needed, issued a warning. */
10346 else
10348 if (nargs == 0)
10350 /* Evidently an out of date version of <stdarg.h>; can't validate
10351 va_start's second argument, but can still work as intended. */
10352 warning_at (current_location,
10353 OPT_Wvarargs,
10354 "%<__builtin_next_arg%> called without an argument");
10355 return true;
10357 else if (nargs > 1)
10359 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10360 return true;
10362 arg = CALL_EXPR_ARG (exp, 0);
10365 if (TREE_CODE (arg) == SSA_NAME)
10366 arg = SSA_NAME_VAR (arg);
10368 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10369 or __builtin_next_arg (0) the first time we see it, after checking
10370 the arguments and if needed issuing a warning. */
10371 if (!integer_zerop (arg))
10373 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10375 /* Strip off all nops for the sake of the comparison. This
10376 is not quite the same as STRIP_NOPS. It does more.
10377 We must also strip off INDIRECT_EXPR for C++ reference
10378 parameters. */
10379 while (CONVERT_EXPR_P (arg)
10380 || TREE_CODE (arg) == INDIRECT_REF)
10381 arg = TREE_OPERAND (arg, 0);
10382 if (arg != last_parm)
10384 /* FIXME: Sometimes with the tree optimizers we can end up with
10385 something that is not the last argument even though the user
10386 used the last argument. We just warn and carry on as if the
10387 last argument had been used, so wrong code may be generated
10388 because of it. */
10389 warning_at (current_location,
10390 OPT_Wvarargs,
10391 "second parameter of %<va_start%> not last named argument");
10394 /* Undefined by C99 7.15.1.4p4 (va_start):
10395 "If the parameter parmN is declared with the register storage
10396 class, with a function or array type, or with a type that is
10397 not compatible with the type that results after application of
10398 the default argument promotions, the behavior is undefined."
10400 else if (DECL_REGISTER (arg))
10402 warning_at (current_location,
10403 OPT_Wvarargs,
10404 "undefined behavior when second parameter of "
10405 "%<va_start%> is declared with %<register%> storage");
10408 /* We want to verify the second parameter just once before the tree
10409 optimizers are run and then avoid keeping it in the tree,
10410 as otherwise we could warn even for correct code like:
10411 void foo (int i, ...)
10412 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10413 if (va_start_p)
10414 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10415 else
10416 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10418 return false;
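/* Editorial sketch, not part of builtins.c: the checks above accept
   the canonical use below and warn when the second va_start operand
   is not the last named parameter.  Name illustrative.  */
static int
va_sum_sketch (int n, ...)
{
  __builtin_va_list ap;
  int i, s = 0;
  __builtin_va_start (ap, n);  /* OK: n is the last named parameter.  */
  for (i = 0; i < n; i++)
    s += __builtin_va_arg (ap, int);
  __builtin_va_end (ap);
  return s;
}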
10422 /* Expand a call EXP to __builtin_object_size. */
10424 static rtx
10425 expand_builtin_object_size (tree exp)
10427 tree ost;
10428 int object_size_type;
10429 tree fndecl = get_callee_fndecl (exp);
10431 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10433 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10434 exp, fndecl);
10435 expand_builtin_trap ();
10436 return const0_rtx;
10439 ost = CALL_EXPR_ARG (exp, 1);
10440 STRIP_NOPS (ost);
10442 if (TREE_CODE (ost) != INTEGER_CST
10443 || tree_int_cst_sgn (ost) < 0
10444 || compare_tree_int (ost, 3) > 0)
10446 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10447 exp, fndecl);
10448 expand_builtin_trap ();
10449 return const0_rtx;
10452 object_size_type = tree_to_shwi (ost);
10454 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10457 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10458 FCODE is the BUILT_IN_* to use.
10459 Return NULL_RTX if we failed; the caller should emit a normal call,
10460 otherwise try to get the result in TARGET, if convenient (and in
10461 mode MODE if that's convenient). */
10463 static rtx
10464 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10465 enum built_in_function fcode)
10467 if (!validate_arglist (exp,
10468 POINTER_TYPE,
10469 fcode == BUILT_IN_MEMSET_CHK
10470 ? INTEGER_TYPE : POINTER_TYPE,
10471 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10472 return NULL_RTX;
10474 tree dest = CALL_EXPR_ARG (exp, 0);
10475 tree src = CALL_EXPR_ARG (exp, 1);
10476 tree len = CALL_EXPR_ARG (exp, 2);
10477 tree size = CALL_EXPR_ARG (exp, 3);
10479 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10480 /*str=*/NULL_TREE, size);
10482 if (!tree_fits_uhwi_p (size))
10483 return NULL_RTX;
10485 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10487 /* Avoid transforming the checking call to an ordinary one when
10488 an overflow has been detected or when the call couldn't be
10489 validated because the size is not constant. */
10490 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10491 return NULL_RTX;
10493 tree fn = NULL_TREE;
10494 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10495 mem{cpy,pcpy,move,set} is available. */
10496 switch (fcode)
10498 case BUILT_IN_MEMCPY_CHK:
10499 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10500 break;
10501 case BUILT_IN_MEMPCPY_CHK:
10502 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10503 break;
10504 case BUILT_IN_MEMMOVE_CHK:
10505 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10506 break;
10507 case BUILT_IN_MEMSET_CHK:
10508 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10509 break;
10510 default:
10511 break;
10514 if (! fn)
10515 return NULL_RTX;
10517 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10518 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10519 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10520 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10522 else if (fcode == BUILT_IN_MEMSET_CHK)
10523 return NULL_RTX;
10524 else
10526 unsigned int dest_align = get_pointer_alignment (dest);
10528 /* If DEST is not a pointer type, call the normal function. */
10529 if (dest_align == 0)
10530 return NULL_RTX;
10532 /* If SRC and DEST are the same (and not volatile), do nothing. */
10533 if (operand_equal_p (src, dest, 0))
10535 tree expr;
10537 if (fcode != BUILT_IN_MEMPCPY_CHK)
10539 /* Evaluate and ignore LEN in case it has side-effects. */
10540 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10541 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10544 expr = fold_build_pointer_plus (dest, len);
10545 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10548 /* __memmove_chk special case. */
10549 if (fcode == BUILT_IN_MEMMOVE_CHK)
10551 unsigned int src_align = get_pointer_alignment (src);
10553 if (src_align == 0)
10554 return NULL_RTX;
10556 /* If src is categorized for a readonly section we can use
10557 normal __memcpy_chk. */
10558 if (readonly_data_expr (src))
10560 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10561 if (!fn)
10562 return NULL_RTX;
10563 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10564 dest, src, len, size);
10565 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10566 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10567 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10570 return NULL_RTX;
10574 /* Emit warning if a buffer overflow is detected at compile time. */
10576 static void
10577 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10579 /* The source string. */
10580 tree srcstr = NULL_TREE;
10581 /* The size of the destination object. */
10582 tree objsize = NULL_TREE;
10583 /* The string that is being concatenated with (as in __strcat_chk)
10584 or null if it isn't. */
10585 tree catstr = NULL_TREE;
10586 /* The maximum length of the source sequence in a bounded operation
10587 (such as __strncat_chk) or null if the operation isn't bounded
10588 (such as __strcat_chk). */
10589 tree maxread = NULL_TREE;
10590 /* The exact size of the access (such as in __strncpy_chk). */
10591 tree size = NULL_TREE;
10593 switch (fcode)
10595 case BUILT_IN_STRCPY_CHK:
10596 case BUILT_IN_STPCPY_CHK:
10597 srcstr = CALL_EXPR_ARG (exp, 1);
10598 objsize = CALL_EXPR_ARG (exp, 2);
10599 break;
10601 case BUILT_IN_STRCAT_CHK:
10602 /* For __strcat_chk the warning will be emitted only if overflowing
10603 by at least strlen (dest) + 1 bytes. */
10604 catstr = CALL_EXPR_ARG (exp, 0);
10605 srcstr = CALL_EXPR_ARG (exp, 1);
10606 objsize = CALL_EXPR_ARG (exp, 2);
10607 break;
10609 case BUILT_IN_STRNCAT_CHK:
10610 catstr = CALL_EXPR_ARG (exp, 0);
10611 srcstr = CALL_EXPR_ARG (exp, 1);
10612 maxread = CALL_EXPR_ARG (exp, 2);
10613 objsize = CALL_EXPR_ARG (exp, 3);
10614 break;
10616 case BUILT_IN_STRNCPY_CHK:
10617 case BUILT_IN_STPNCPY_CHK:
10618 srcstr = CALL_EXPR_ARG (exp, 1);
10619 size = CALL_EXPR_ARG (exp, 2);
10620 objsize = CALL_EXPR_ARG (exp, 3);
10621 break;
10623 case BUILT_IN_SNPRINTF_CHK:
10624 case BUILT_IN_VSNPRINTF_CHK:
10625 maxread = CALL_EXPR_ARG (exp, 1);
10626 objsize = CALL_EXPR_ARG (exp, 3);
10627 break;
10628 default:
10629 gcc_unreachable ();
10632 if (catstr && maxread)
10634 /* Check __strncat_chk. There is no way to determine the length
10635 of the string to which the source string is being appended so
10636 just warn when the length of the source string is not known. */
10637 check_strncat_sizes (exp, objsize);
10638 return;
10641 /* The destination argument is the first one for all built-ins above. */
10642 tree dst = CALL_EXPR_ARG (exp, 0);
10644 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10647 /* Emit warning if a buffer overflow is detected at compile time
10648 in __sprintf_chk/__vsprintf_chk calls. */
10650 static void
10651 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10653 tree size, len, fmt;
10654 const char *fmt_str;
10655 int nargs = call_expr_nargs (exp);
10657 /* Verify the required arguments in the original call. */
10659 if (nargs < 4)
10660 return;
10661 size = CALL_EXPR_ARG (exp, 2);
10662 fmt = CALL_EXPR_ARG (exp, 3);
10664 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10665 return;
10667 /* Check whether the format is a literal string constant. */
10668 fmt_str = c_getstr (fmt);
10669 if (fmt_str == NULL)
10670 return;
10672 if (!init_target_chars ())
10673 return;
10675 /* If the format doesn't contain % args or %%, we know its size. */
10676 if (strchr (fmt_str, target_percent) == 0)
10677 len = build_int_cstu (size_type_node, strlen (fmt_str));
10678 /* If the format is "%s" and first ... argument is a string literal,
10679 we know it too. */
10680 else if (fcode == BUILT_IN_SPRINTF_CHK
10681 && strcmp (fmt_str, target_percent_s) == 0)
10683 tree arg;
10685 if (nargs < 5)
10686 return;
10687 arg = CALL_EXPR_ARG (exp, 4);
10688 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10689 return;
10691 len = c_strlen (arg, 1);
10692 if (!len || ! tree_fits_uhwi_p (len))
10693 return;
10695 else
10696 return;
10698 /* Add one for the terminating nul. */
10699 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10701 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10702 /*maxread=*/NULL_TREE, len, size);
10705 /* Emit a warning if free is called with the address of a variable. */
10707 static void
10708 maybe_emit_free_warning (tree exp)
10710 if (call_expr_nargs (exp) != 1)
10711 return;
10713 tree arg = CALL_EXPR_ARG (exp, 0);
10715 STRIP_NOPS (arg);
10716 if (TREE_CODE (arg) != ADDR_EXPR)
10717 return;
10719 arg = get_base_address (TREE_OPERAND (arg, 0));
10720 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10721 return;
10723 if (SSA_VAR_P (arg))
10724 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10725 "%Kattempt to free a non-heap object %qD", exp, arg);
10726 else
10727 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10728 "%Kattempt to free a non-heap object", exp);
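/* For example, given

     void f (void)
     {
       int x;
       free (&x);
     }

   the argument strips to the ADDR_EXPR of the VAR_DECL x, whose base
   satisfies SSA_VAR_P, so the first branch above reports "attempt to
   free a non-heap object 'x'".  A pointer obtained from malloc
   arrives here as a plain SSA name rather than an ADDR_EXPR, fails
   the test above, and no warning is emitted.  */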
10731 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10732 if possible. */
10734 static tree
10735 fold_builtin_object_size (tree ptr, tree ost)
10737 unsigned HOST_WIDE_INT bytes;
10738 int object_size_type;
10740 if (!validate_arg (ptr, POINTER_TYPE)
10741 || !validate_arg (ost, INTEGER_TYPE))
10742 return NULL_TREE;
10744 STRIP_NOPS (ost);
10746 if (TREE_CODE (ost) != INTEGER_CST
10747 || tree_int_cst_sgn (ost) < 0
10748 || compare_tree_int (ost, 3) > 0)
10749 return NULL_TREE;
10751 object_size_type = tree_to_shwi (ost);
10753 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10754 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10755 and (size_t) 0 for types 2 and 3. */
10756 if (TREE_SIDE_EFFECTS (ptr))
10757 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10759 if (TREE_CODE (ptr) == ADDR_EXPR)
10761 compute_builtin_object_size (ptr, object_size_type, &bytes);
10762 if (wi::fits_to_tree_p (bytes, size_type_node))
10763 return build_int_cstu (size_type_node, bytes);
10765 else if (TREE_CODE (ptr) == SSA_NAME)
10767 /* If the object size is not known yet, delay folding
10768 until later. Maybe subsequent passes will help
10769 determine it. */
10770 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10771 && wi::fits_to_tree_p (bytes, size_type_node))
10772 return build_int_cstu (size_type_node, bytes);
10775 return NULL_TREE;
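/* A worked example of the values this folds to (user view):

     char a[10];
     __builtin_object_size (a, 0)      -> 10  (maximum object size)
     __builtin_object_size (a, 2)      -> 10  (minimum object size)
     __builtin_object_size (a + 4, 0)  -> 6

   When compute_builtin_object_size cannot determine a size, the call
   is left alone here; if no later pass succeeds either, it is
   eventually folded to (size_t) -1 for types 0 and 1 and (size_t) 0
   for types 2 and 3.  */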
10778 /* Builtins with folding operations that operate on "..." arguments
10779 need special handling; we need to store the arguments in a convenient
10780 data structure before attempting any folding. Fortunately there are
10781 only a few builtins that fall into this category. FNDECL is the
10782 function, EXP is the CALL_EXPR for the call. */
10784 static tree
10785 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10787 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10788 tree ret = NULL_TREE;
10790 switch (fcode)
10792 case BUILT_IN_FPCLASSIFY:
10793 ret = fold_builtin_fpclassify (loc, args, nargs);
10794 break;
10796 default:
10797 break;
10799 if (ret)
10801 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10802 SET_EXPR_LOCATION (ret, loc);
10803 TREE_NO_WARNING (ret) = 1;
10804 return ret;
10806 return NULL_TREE;
10809 /* Initialize format string characters in the target charset. */
10811 bool
10812 init_target_chars (void)
10814 static bool init;
10815 if (!init)
10817 target_newline = lang_hooks.to_target_charset ('\n');
10818 target_percent = lang_hooks.to_target_charset ('%');
10819 target_c = lang_hooks.to_target_charset ('c');
10820 target_s = lang_hooks.to_target_charset ('s');
10821 if (target_newline == 0 || target_percent == 0 || target_c == 0
10822 || target_s == 0)
10823 return false;
10825 target_percent_c[0] = target_percent;
10826 target_percent_c[1] = target_c;
10827 target_percent_c[2] = '\0';
10829 target_percent_s[0] = target_percent;
10830 target_percent_s[1] = target_s;
10831 target_percent_s[2] = '\0';
10833 target_percent_s_newline[0] = target_percent;
10834 target_percent_s_newline[1] = target_s;
10835 target_percent_s_newline[2] = target_newline;
10836 target_percent_s_newline[3] = '\0';
10838 init = true;
10840 return true;
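/* The arrays initialized above let folders compare host-side format
   strings against target-encoded characters, e.g.:

     if (!init_target_chars ())
       return;
     if (strcmp (fmt_str, target_percent_s) == 0)
       ... handle a "%s" format ...

   as maybe_emit_sprintf_chk_warning does earlier in this file, which
   stays correct even when the host and target character sets
   differ.  */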
10843 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10844 and no overflow/underflow occurred. INEXACT is true if M was not
10845 exactly calculated. TYPE is the tree type for the result. This
10846 function assumes that you cleared the MPFR flags immediately before
10847 calculating M, so that any flag set on entry here was raised by
10848 that calculation. Return NULL_TREE if any checks fail. */
10850 static tree
10851 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10853 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10854 overflow/underflow occurred. If -frounding-math, proceed iff the
10855 result of calling FUNC was exact. */
10856 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10857 && (!flag_rounding_math || !inexact))
10859 REAL_VALUE_TYPE rr;
10861 real_from_mpfr (&rr, m, type, GMP_RNDN);
10862 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10863 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10864 but the mpfr_t is not, then we underflowed in the
10865 conversion. */
10866 if (real_isfinite (&rr)
10867 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10869 REAL_VALUE_TYPE rmode;
10871 real_convert (&rmode, TYPE_MODE (type), &rr);
10872 /* Proceed iff the specified mode can hold the value. */
10873 if (real_identical (&rmode, &rr))
10874 return build_real (type, rmode);
10877 return NULL_TREE;
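/* The expected calling pattern (a sketch mirroring do_mpfr_remquo
   below): clear the flags, evaluate, then validate the result:

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();
     int inexact = mpfr_sin (m, m, rnd);
     tree result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);

   so the overflow/underflow flags inspected above reflect only the
   single mpfr evaluation.  */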
10880 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10881 number and no overflow/underflow occurred. INEXACT is true if M
10882 was not exactly calculated. TYPE is the tree type for the result.
10883 This function assumes that you cleared the MPFR flags immediately
10884 before calculating M, so that any flag set on entry here was
10885 raised by that calculation. Return NULL_TREE if any checks fail;
10886 if FORCE_CONVERT is true, bypass the checks. */
10888 static tree
10889 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10891 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10892 overflow/underflow occurred. If -frounding-math, proceed iff the
10893 result of calling FUNC was exact. */
10894 if (force_convert
10895 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10896 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10897 && (!flag_rounding_math || !inexact)))
10899 REAL_VALUE_TYPE re, im;
10901 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10902 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10903 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10904 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10905 but the mpfr_t is not, then we underflowed in the
10906 conversion. */
10907 if (force_convert
10908 || (real_isfinite (&re) && real_isfinite (&im)
10909 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10910 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10912 REAL_VALUE_TYPE re_mode, im_mode;
10914 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10915 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10916 /* Proceed iff the specified mode can hold the value. */
10917 if (force_convert
10918 || (real_identical (&re_mode, &re)
10919 && real_identical (&im_mode, &im)))
10920 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10921 build_real (TREE_TYPE (type), im_mode));
10924 return NULL_TREE;
10927 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10928 the value pointed to by ARG_QUO and return the result. The type is taken
10929 from the type of ARG0 and is used for setting the precision of the
10930 calculation and results. */
10932 static tree
10933 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10935 tree const type = TREE_TYPE (arg0);
10936 tree result = NULL_TREE;
10938 STRIP_NOPS (arg0);
10939 STRIP_NOPS (arg1);
10941 /* To proceed, MPFR must exactly represent the target floating point
10942 format, which only happens when the target base equals two. */
10943 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10944 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10945 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10947 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10948 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10950 if (real_isfinite (ra0) && real_isfinite (ra1))
10952 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10953 const int prec = fmt->p;
10954 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10955 tree result_rem;
10956 long integer_quo;
10957 mpfr_t m0, m1;
10959 mpfr_inits2 (prec, m0, m1, NULL);
10960 mpfr_from_real (m0, ra0, GMP_RNDN);
10961 mpfr_from_real (m1, ra1, GMP_RNDN);
10962 mpfr_clear_flags ();
10963 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10964 /* Remquo is independent of the rounding mode, so pass
10965 inexact=0 to do_mpfr_ckconv(). */
10966 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10967 mpfr_clears (m0, m1, NULL);
10968 if (result_rem)
10970 /* MPFR calculates quo in the host's long, so it may
10971 return more bits in quo than the target int can hold
10972 if sizeof (host long) > sizeof (target int). This can
10973 happen even for native compilers in LP64 mode. In
10974 these cases, reduce quo modulo the largest value the
10975 target int can hold while leaving one bit for the
10976 sign. */
10977 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10978 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10980 /* Dereference the quo pointer argument. */
10981 arg_quo = build_fold_indirect_ref (arg_quo);
10982 /* Proceed iff a valid pointer type was passed in. */
10983 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10985 /* Set the value. */
10986 tree result_quo
10987 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10988 build_int_cst (TREE_TYPE (arg_quo),
10989 integer_quo));
10990 TREE_SIDE_EFFECTS (result_quo) = 1;
10991 /* Combine the quo assignment with the rem. */
10992 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10993 result_quo, result_rem));
10998 return result;
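/* Worked example: folding remquo (5.0, 3.0, &q).  mpfr_remquo
   computes the remainder with the quotient rounded to nearest, so
   5.0/3.0 rounds to 2, integer_quo == 2, and the remainder is
   5.0 - 2*3.0 == -1.0; the tree built above is effectively the
   COMPOUND_EXPR (q = 2, -1.0).  */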
11001 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11002 resulting value as a tree with type TYPE. The mpfr precision is
11003 set to the precision of TYPE. We assume that this mpfr function
11004 returns zero if the result could be calculated exactly within the
11005 requested precision. In addition, the integer pointer represented
11006 by ARG_SG will be dereferenced and set to the appropriate signgam
11007 (-1 or 1) value. */
11009 static tree
11010 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11012 tree result = NULL_TREE;
11014 STRIP_NOPS (arg);
11016 /* To proceed, MPFR must exactly represent the target floating point
11017 format, which only happens when the target base equals two. Also
11018 verify ARG is a constant and that ARG_SG is an int pointer. */
11019 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11020 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11021 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11022 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11024 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11026 /* In addition to NaN and Inf, the argument cannot be zero or a
11027 negative integer. */
11028 if (real_isfinite (ra)
11029 && ra->cl != rvc_zero
11030 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11032 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11033 const int prec = fmt->p;
11034 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11035 int inexact, sg;
11036 mpfr_t m;
11037 tree result_lg;
11039 mpfr_init2 (m, prec);
11040 mpfr_from_real (m, ra, GMP_RNDN);
11041 mpfr_clear_flags ();
11042 inexact = mpfr_lgamma (m, &sg, m, rnd);
11043 result_lg = do_mpfr_ckconv (m, type, inexact);
11044 mpfr_clear (m);
11045 if (result_lg)
11047 tree result_sg;
11049 /* Dereference the arg_sg pointer argument. */
11050 arg_sg = build_fold_indirect_ref (arg_sg);
11051 /* Assign the signgam value into *arg_sg. */
11052 result_sg = fold_build2 (MODIFY_EXPR,
11053 TREE_TYPE (arg_sg), arg_sg,
11054 build_int_cst (TREE_TYPE (arg_sg), sg));
11055 TREE_SIDE_EFFECTS (result_sg) = 1;
11056 /* Combine the signgam assignment with the lgamma result. */
11057 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11058 result_sg, result_lg));
11063 return result;
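/* Worked example: folding lgamma_r (0.5, &sg).  Gamma (0.5) ==
   sqrt (pi) ~ 1.7725 is positive, so mpfr_lgamma sets sg to 1 and m
   to log (sqrt (pi)) ~ 0.57236; the fold then yields the
   COMPOUND_EXPR (sg = 1, 0.57236...).  */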
11066 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11067 mpc function FUNC on it and return the resulting value as a tree
11068 with type TYPE. The mpfr precision is set to the precision of
11069 TYPE. We assume that function FUNC returns zero if the result
11070 could be calculated exactly within the requested precision. If
11071 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11072 in the arguments and/or results. */
11074 tree
11075 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11076 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11078 tree result = NULL_TREE;
11080 STRIP_NOPS (arg0);
11081 STRIP_NOPS (arg1);
11083 /* To proceed, MPFR must exactly represent the target floating point
11084 format, which only happens when the target base equals two. */
11085 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11086 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11087 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11088 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11089 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11091 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11092 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11093 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11094 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11096 if (do_nonfinite
11097 || (real_isfinite (re0) && real_isfinite (im0)
11098 && real_isfinite (re1) && real_isfinite (im1)))
11100 const struct real_format *const fmt =
11101 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11102 const int prec = fmt->p;
11103 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11104 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11105 int inexact;
11106 mpc_t m0, m1;
11108 mpc_init2 (m0, prec);
11109 mpc_init2 (m1, prec);
11110 mpfr_from_real (mpc_realref (m0), re0, rnd);
11111 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11112 mpfr_from_real (mpc_realref (m1), re1, rnd);
11113 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11114 mpfr_clear_flags ();
11115 inexact = func (m0, m0, m1, crnd);
11116 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11117 mpc_clear (m0);
11118 mpc_clear (m1);
11122 return result;
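/* This is the workhorse behind folding two-argument complex builtins
   with constant operands; e.g. the constant folders invoke it
   roughly as

     do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/0, mpc_pow);

   to evaluate __builtin_cpow (x, y) at compile time when both
   arguments are COMPLEX_CSTs.  */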
11125 /* A wrapper function for builtin folding that prevents warnings for
11126 "statement without effect" and the like, caused by removing the
11127 call node before the warning is generated. */
11129 tree
11130 fold_call_stmt (gcall *stmt, bool ignore)
11132 tree ret = NULL_TREE;
11133 tree fndecl = gimple_call_fndecl (stmt);
11134 location_t loc = gimple_location (stmt);
11135 if (fndecl && fndecl_built_in_p (fndecl)
11136 && !gimple_call_va_arg_pack_p (stmt))
11138 int nargs = gimple_call_num_args (stmt);
11139 tree *args = (nargs > 0
11140 ? gimple_call_arg_ptr (stmt, 0)
11141 : &error_mark_node);
11143 if (avoid_folding_inline_builtin (fndecl))
11144 return NULL_TREE;
11145 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11147 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11149 else
11151 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11152 if (ret)
11154 /* Propagate location information from original call to
11155 expansion of builtin. Otherwise things like
11156 maybe_emit_chk_warning, that operate on the expansion
11157 of a builtin, will use the wrong location information. */
11158 if (gimple_has_location (stmt))
11160 tree realret = ret;
11161 if (TREE_CODE (ret) == NOP_EXPR)
11162 realret = TREE_OPERAND (ret, 0);
11163 if (CAN_HAVE_LOCATION_P (realret)
11164 && !EXPR_HAS_LOCATION (realret))
11165 SET_EXPR_LOCATION (realret, loc);
11166 return realret;
11168 return ret;
11172 return NULL_TREE;
11175 /* Look up the function in builtin_decl that corresponds to DECL
11176 and set ASMSPEC as its user assembler name. DECL must be a
11177 function decl that declares a builtin. */
11179 void
11180 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11182 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11183 && asmspec != 0);
11185 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11186 set_user_assembler_name (builtin, asmspec);
11188 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11189 && INT_TYPE_SIZE < BITS_PER_WORD)
11191 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11192 set_user_assembler_libfunc ("ffs", asmspec);
11193 set_optab_libfunc (ffs_optab, mode, "ffs");
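/* For example, a declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   (where "my_ffs" is purely illustrative) renames the builtin's
   assembler name, and on targets where int is narrower than a word
   the ffs optab's libcall is redirected as well, so expansions of
   __builtin_ffs call the renamed symbol.  */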
11197 /* Return true if DECL is a builtin that expands to a constant or similarly
11198 simple code. */
11199 bool
11200 is_simple_builtin (tree decl)
11202 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11203 switch (DECL_FUNCTION_CODE (decl))
11205 /* Builtins that expand to constants. */
11206 case BUILT_IN_CONSTANT_P:
11207 case BUILT_IN_EXPECT:
11208 case BUILT_IN_OBJECT_SIZE:
11209 case BUILT_IN_UNREACHABLE:
11210 /* Simple register moves or loads from stack. */
11211 case BUILT_IN_ASSUME_ALIGNED:
11212 case BUILT_IN_RETURN_ADDRESS:
11213 case BUILT_IN_EXTRACT_RETURN_ADDR:
11214 case BUILT_IN_FROB_RETURN_ADDR:
11215 case BUILT_IN_RETURN:
11216 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11217 case BUILT_IN_FRAME_ADDRESS:
11218 case BUILT_IN_VA_END:
11219 case BUILT_IN_STACK_SAVE:
11220 case BUILT_IN_STACK_RESTORE:
11221 /* Exception state returns or moves registers around. */
11222 case BUILT_IN_EH_FILTER:
11223 case BUILT_IN_EH_POINTER:
11224 case BUILT_IN_EH_COPY_VALUES:
11225 return true;
11227 default:
11228 return false;
11231 return false;
11234 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11235 is most probably expanded inline into reasonably simple code. This is a
11236 superset of is_simple_builtin. */
11237 bool
11238 is_inexpensive_builtin (tree decl)
11240 if (!decl)
11241 return false;
11242 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11243 return true;
11244 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11245 switch (DECL_FUNCTION_CODE (decl))
11247 case BUILT_IN_ABS:
11248 CASE_BUILT_IN_ALLOCA:
11249 case BUILT_IN_BSWAP16:
11250 case BUILT_IN_BSWAP32:
11251 case BUILT_IN_BSWAP64:
11252 case BUILT_IN_CLZ:
11253 case BUILT_IN_CLZIMAX:
11254 case BUILT_IN_CLZL:
11255 case BUILT_IN_CLZLL:
11256 case BUILT_IN_CTZ:
11257 case BUILT_IN_CTZIMAX:
11258 case BUILT_IN_CTZL:
11259 case BUILT_IN_CTZLL:
11260 case BUILT_IN_FFS:
11261 case BUILT_IN_FFSIMAX:
11262 case BUILT_IN_FFSL:
11263 case BUILT_IN_FFSLL:
11264 case BUILT_IN_IMAXABS:
11265 case BUILT_IN_FINITE:
11266 case BUILT_IN_FINITEF:
11267 case BUILT_IN_FINITEL:
11268 case BUILT_IN_FINITED32:
11269 case BUILT_IN_FINITED64:
11270 case BUILT_IN_FINITED128:
11271 case BUILT_IN_FPCLASSIFY:
11272 case BUILT_IN_ISFINITE:
11273 case BUILT_IN_ISINF_SIGN:
11274 case BUILT_IN_ISINF:
11275 case BUILT_IN_ISINFF:
11276 case BUILT_IN_ISINFL:
11277 case BUILT_IN_ISINFD32:
11278 case BUILT_IN_ISINFD64:
11279 case BUILT_IN_ISINFD128:
11280 case BUILT_IN_ISNAN:
11281 case BUILT_IN_ISNANF:
11282 case BUILT_IN_ISNANL:
11283 case BUILT_IN_ISNAND32:
11284 case BUILT_IN_ISNAND64:
11285 case BUILT_IN_ISNAND128:
11286 case BUILT_IN_ISNORMAL:
11287 case BUILT_IN_ISGREATER:
11288 case BUILT_IN_ISGREATEREQUAL:
11289 case BUILT_IN_ISLESS:
11290 case BUILT_IN_ISLESSEQUAL:
11291 case BUILT_IN_ISLESSGREATER:
11292 case BUILT_IN_ISUNORDERED:
11293 case BUILT_IN_VA_ARG_PACK:
11294 case BUILT_IN_VA_ARG_PACK_LEN:
11295 case BUILT_IN_VA_COPY:
11296 case BUILT_IN_TRAP:
11297 case BUILT_IN_SAVEREGS:
11298 case BUILT_IN_POPCOUNTL:
11299 case BUILT_IN_POPCOUNTLL:
11300 case BUILT_IN_POPCOUNTIMAX:
11301 case BUILT_IN_POPCOUNT:
11302 case BUILT_IN_PARITYL:
11303 case BUILT_IN_PARITYLL:
11304 case BUILT_IN_PARITYIMAX:
11305 case BUILT_IN_PARITY:
11306 case BUILT_IN_LABS:
11307 case BUILT_IN_LLABS:
11308 case BUILT_IN_PREFETCH:
11309 case BUILT_IN_ACC_ON_DEVICE:
11310 return true;
11312 default:
11313 return is_simple_builtin (decl);
11316 return false;
11319 /* Return true if T is a constant and the value cast to a target char
11320 can be represented by a host char.
11321 Store the cast char constant in *P if so. */
11323 bool
11324 target_char_cst_p (tree t, char *p)
11326 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11327 return false;
11329 *p = (char)tree_to_uhwi (t);
11330 return true;
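/* Typical use (a sketch of the pattern in the gimple-fold.c string
   folders): a folder handling e.g. memchr with a constant character
   does

     char c;
     if (!target_char_cst_p (arg2, &c))
       return false;
     const char *r = (const char *) memchr (p1, c, n);

   and thereby bails out on cross configurations where a host char
   cannot faithfully hold the target char.  */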
11333 /* Return true if the builtin DECL is implemented in a standard library.
11334 Otherwise return false, which does not guarantee that it is not (the
11335 list of handled builtins below may be incomplete). */
11337 bool
11338 builtin_with_linkage_p (tree decl)
11340 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11341 switch (DECL_FUNCTION_CODE (decl))
11343 CASE_FLT_FN (BUILT_IN_ACOS):
11344 CASE_FLT_FN (BUILT_IN_ACOSH):
11345 CASE_FLT_FN (BUILT_IN_ASIN):
11346 CASE_FLT_FN (BUILT_IN_ASINH):
11347 CASE_FLT_FN (BUILT_IN_ATAN):
11348 CASE_FLT_FN (BUILT_IN_ATANH):
11349 CASE_FLT_FN (BUILT_IN_ATAN2):
11350 CASE_FLT_FN (BUILT_IN_CBRT):
11351 CASE_FLT_FN (BUILT_IN_CEIL):
11352 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11353 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11354 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11355 CASE_FLT_FN (BUILT_IN_COS):
11356 CASE_FLT_FN (BUILT_IN_COSH):
11357 CASE_FLT_FN (BUILT_IN_ERF):
11358 CASE_FLT_FN (BUILT_IN_ERFC):
11359 CASE_FLT_FN (BUILT_IN_EXP):
11360 CASE_FLT_FN (BUILT_IN_EXP2):
11361 CASE_FLT_FN (BUILT_IN_EXPM1):
11362 CASE_FLT_FN (BUILT_IN_FABS):
11363 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11364 CASE_FLT_FN (BUILT_IN_FDIM):
11365 CASE_FLT_FN (BUILT_IN_FLOOR):
11366 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11367 CASE_FLT_FN (BUILT_IN_FMA):
11368 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11369 CASE_FLT_FN (BUILT_IN_FMAX):
11370 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11371 CASE_FLT_FN (BUILT_IN_FMIN):
11372 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11373 CASE_FLT_FN (BUILT_IN_FMOD):
11374 CASE_FLT_FN (BUILT_IN_FREXP):
11375 CASE_FLT_FN (BUILT_IN_HYPOT):
11376 CASE_FLT_FN (BUILT_IN_ILOGB):
11377 CASE_FLT_FN (BUILT_IN_LDEXP):
11378 CASE_FLT_FN (BUILT_IN_LGAMMA):
11379 CASE_FLT_FN (BUILT_IN_LLRINT):
11380 CASE_FLT_FN (BUILT_IN_LLROUND):
11381 CASE_FLT_FN (BUILT_IN_LOG):
11382 CASE_FLT_FN (BUILT_IN_LOG10):
11383 CASE_FLT_FN (BUILT_IN_LOG1P):
11384 CASE_FLT_FN (BUILT_IN_LOG2):
11385 CASE_FLT_FN (BUILT_IN_LOGB):
11386 CASE_FLT_FN (BUILT_IN_LRINT):
11387 CASE_FLT_FN (BUILT_IN_LROUND):
11388 CASE_FLT_FN (BUILT_IN_MODF):
11389 CASE_FLT_FN (BUILT_IN_NAN):
11390 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11391 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11392 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11393 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11394 CASE_FLT_FN (BUILT_IN_POW):
11395 CASE_FLT_FN (BUILT_IN_REMAINDER):
11396 CASE_FLT_FN (BUILT_IN_REMQUO):
11397 CASE_FLT_FN (BUILT_IN_RINT):
11398 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11399 CASE_FLT_FN (BUILT_IN_ROUND):
11400 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11401 CASE_FLT_FN (BUILT_IN_SCALBLN):
11402 CASE_FLT_FN (BUILT_IN_SCALBN):
11403 CASE_FLT_FN (BUILT_IN_SIN):
11404 CASE_FLT_FN (BUILT_IN_SINH):
11405 CASE_FLT_FN (BUILT_IN_SINCOS):
11406 CASE_FLT_FN (BUILT_IN_SQRT):
11407 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11408 CASE_FLT_FN (BUILT_IN_TAN):
11409 CASE_FLT_FN (BUILT_IN_TANH):
11410 CASE_FLT_FN (BUILT_IN_TGAMMA):
11411 CASE_FLT_FN (BUILT_IN_TRUNC):
11412 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11413 return true;
11414 default:
11415 break;
11417 return false;