/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "attribs.h"
#include "asan.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

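/* For instance, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false.  */
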
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

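/* As a worked example of the contract above (an illustrative sketch,
   not called from anywhere): if the address of EXP is known to be of
   the form 8 * k + 4 bytes for some unknown k, then M is 8 and N is 4,
   both scaled to bits on return:

     unsigned int align;
     unsigned HOST_WIDE_INT bitpos;
     get_object_alignment_2 (exp, &align, &bitpos, false);
     ==> align == 8 * BITS_PER_UNIT, bitpos == 4 * BITS_PER_UNIT  */
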
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer:
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

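/* For example, continuing the sketch above: for an object at byte
   address 8 * k + 4, get_object_alignment_1 reports align == 64 and
   bitpos == 32 (in bits, assuming 8-bit units), so get_object_alignment
   returns least_bit_hwi (32) == 32, i.e. a guaranteed 4-byte
   alignment.  */
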
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an
	     approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* ALIGN and BITPOS now specify known low bits of the pointer:
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

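/* For instance, string_length ("ab\0cd", 1, 5) is 2, and with
   ELTSIZE == 2 the buffer "a\0b\0\0\0" scanned with MAXELTS == 3
   yields 2 as well, since the third 2-byte element is all zeros.  */
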
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	/* Return when an embedded null character is found.  */
	return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}

      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

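/* Illustrative results (a sketch): for SRC pointing to "hello" this
   folds to ssize_int (5); for "foo\0bar" plus a variable offset it
   returns NULL_TREE because of the embedded null; for "foobar" plus a
   variable offset OFF it folds to 6 - OFF as outlined above.  */
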
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

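/* For example, on a little-endian target c_readstr ("abcd", SImode)
   yields (const_int 0x64636261), with 'a' (0x61) in the least
   significant byte; a fully big-endian target yields 0x61626364.  */
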
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

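/* The user-level builtins expanded here look like this (an
   illustrative user-code sketch, not part of this file):

     void *ra = __builtin_return_address (0);   <- caller's resume address
     void *fp = __builtin_frame_address (1);    <- parent frame

   Nonzero counts walk the dynamic chain generated above and are only
   reliable if every intervening frame keeps a frame pointer.  */
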
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode,
						       receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

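/* The resulting buffer layout, in Pmode-sized words, is thus:

     buf[0]    frame value (targetm.builtin_setjmp_frame_value)
     buf[1]    address of RECEIVER_LABEL
     buf[2..]  machine-dependent stack save area (sa_mode)

   which is the layout assumed by expand_builtin_longjmp and
   expand_builtin_update_setjmp_buf below.  */
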
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

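/* Typical usage of the pair expanded above, as user code would spell
   it - a sketch only (do_work and handle_unwind are hypothetical),
   and these builtins are documented for internal exception handling
   use only:

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();         <- first return; value is 0
     else
       handle_unwind ();   <- reached via __builtin_longjmp (buf, 1)

   Note that the second argument to __builtin_longjmp must be the
   constant 1, as asserted above.  */
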
/* Return true if the const_call_expr_arg_iterator ITER has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one va_end in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

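/* Example uses, as seen throughout this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       requires exactly two pointer arguments, while
     validate_arglist (exp, POINTER_TYPE, 0)
       requires one pointer argument followed by anything.  */
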
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0,
	       "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

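/* User-level shape of the builtin expanded above (illustrative only):

     __builtin_prefetch (p);            <- read prefetch, locality 3
     __builtin_prefetch (p + 8, 1, 0);  <- write prefetch, no locality

   Both optional arguments must be compile-time integer constants, as
   enforced above.  */
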
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0,
					  0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
    }
  return size;
}

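/* The block laid out here and filled in by expand_builtin_apply_args_1
   below therefore looks like:

     [incoming arg pointer]
     [structure value address, unless passed "invisibly"]
     [one slot per FUNCTION_ARG_REGNO_P register, each aligned
      to its mode]  */
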
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

1468 /* Save the state required to perform an untyped call with the same
1469 arguments as were passed to the current function. */
1471 static rtx
1472 expand_builtin_apply_args_1 (void)
1474 rtx registers, tem;
1475 int size, align, regno;
1476 fixed_size_mode mode;
1477 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1479 /* Create a block where the arg-pointer, structure value address,
1480 and argument registers can be saved. */
1481 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1483 /* Walk past the arg-pointer and structure value address. */
1484 size = GET_MODE_SIZE (Pmode);
1485 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1486 size += GET_MODE_SIZE (Pmode);
1488 /* Save each register used in calling a function to the block. */
1489 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1490 if ((mode = apply_args_mode[regno]) != VOIDmode)
1492 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1493 if (size % align != 0)
1494 size = CEIL (size, align) * align;
1496 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1498 emit_move_insn (adjust_address (registers, mode, size), tem);
1499 size += GET_MODE_SIZE (mode);
1502 /* Save the arg pointer to the block. */
1503 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1504 /* We need the pointer as the caller actually passed them to us, not
1505 as we might have pretended they were passed. Make sure it's a valid
1506 operand, as emit_move_insn isn't expected to handle a PLUS. */
1507 if (STACK_GROWS_DOWNWARD)
1509 = force_operand (plus_constant (Pmode, tem,
1510 crtl->args.pretend_args_size),
1511 NULL_RTX);
1512 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1514 size = GET_MODE_SIZE (Pmode);
1516 /* Save the structure value address unless this is passed as an
1517 "invisible" first argument. */
1518 if (struct_incoming_value)
1520 emit_move_insn (adjust_address (registers, Pmode, size),
1521 copy_to_reg (struct_incoming_value));
1522 size += GET_MODE_SIZE (Pmode);
1525 /* Return the address of the block. */
1526 return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1573 /* Perform an untyped call and save the state required to perform an
1574 untyped return of whatever value was returned by the given function. */
1576 static rtx
1577 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1579 int size, align, regno;
1580 fixed_size_mode mode;
1581 rtx incoming_args, result, reg, dest, src;
1582 rtx_call_insn *call_insn;
1583 rtx old_stack_level = 0;
1584 rtx call_fusage = 0;
1585 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1587 arguments = convert_memory_address (Pmode, arguments);
1589 /* Create a block where the return registers can be saved. */
1590 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1592 /* Fetch the arg pointer from the ARGUMENTS block. */
1593 incoming_args = gen_reg_rtx (Pmode);
1594 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1595 if (!STACK_GROWS_DOWNWARD)
1596 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1597 incoming_args, 0, OPTAB_LIB_WIDEN);
1599 /* Push a new argument block and copy the arguments. Do not allow
1600 the (potential) memcpy call below to interfere with our stack
1601 manipulations. */
1602 do_pending_stack_adjust ();
1603 NO_DEFER_POP;
1605 /* Save the stack with nonlocal if available. */
1606 if (targetm.have_save_stack_nonlocal ())
1607 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1608 else
1609 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1611 /* Allocate a block of memory onto the stack and copy the memory
1612 arguments to the outgoing arguments address. We can pass TRUE
1613 as the 4th argument because we just saved the stack pointer
1614 and will restore it right after the call. */
1615 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1617 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1618 may have already set current_function_calls_alloca to true.
1619 current_function_calls_alloca won't be set if argsize is zero,
1620 so we have to guarantee need_drap is true here. */
1621 if (SUPPORTS_STACK_ALIGNMENT)
1622 crtl->need_drap = true;
1624 dest = virtual_outgoing_args_rtx;
1625 if (!STACK_GROWS_DOWNWARD)
1627 if (CONST_INT_P (argsize))
1628 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1629 else
1630 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1632 dest = gen_rtx_MEM (BLKmode, dest);
1633 set_mem_align (dest, PARM_BOUNDARY);
1634 src = gen_rtx_MEM (BLKmode, incoming_args);
1635 set_mem_align (src, PARM_BOUNDARY);
1636 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1638 /* Refer to the argument block. */
1639 apply_args_size ();
1640 arguments = gen_rtx_MEM (BLKmode, arguments);
1641 set_mem_align (arguments, PARM_BOUNDARY);
1643 /* Walk past the arg-pointer and structure value address. */
1644 size = GET_MODE_SIZE (Pmode);
1645 if (struct_value)
1646 size += GET_MODE_SIZE (Pmode);
1648 /* Restore each of the registers previously saved. Make USE insns
1649 for each of these registers for use in making the call. */
1650 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1651 if ((mode = apply_args_mode[regno]) != VOIDmode)
1653 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1654 if (size % align != 0)
1655 size = CEIL (size, align) * align;
1656 reg = gen_rtx_REG (mode, regno);
1657 emit_move_insn (reg, adjust_address (arguments, mode, size));
1658 use_reg (&call_fusage, reg);
1659 size += GET_MODE_SIZE (mode);
1662 /* Restore the structure value address unless this is passed as an
1663 "invisible" first argument. */
1664 size = GET_MODE_SIZE (Pmode);
1665 if (struct_value)
1667 rtx value = gen_reg_rtx (Pmode);
1668 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1669 emit_move_insn (struct_value, value);
1670 if (REG_P (struct_value))
1671 use_reg (&call_fusage, struct_value);
1672 size += GET_MODE_SIZE (Pmode);
1675 /* All arguments and registers used for the call are set up by now! */
1676 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1678 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1679 work is needed, and we don't want to load it into a register as an
1680 optimization, because prepare_call_address already did that if needed. */
1681 if (GET_CODE (function) != SYMBOL_REF)
1682 function = memory_address (FUNCTION_MODE, function);
1684 /* Generate the actual call instruction and save the return value. */
1685 if (targetm.have_untyped_call ())
1687 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1688 emit_call_insn (targetm.gen_untyped_call (mem, result,
1689 result_vector (1, result)));
1691 else if (targetm.have_call_value ())
1693 rtx valreg = 0;
1695 /* Locate the unique return register. It is not possible to
1696 express a call that sets more than one return register using
1697 call_value; use untyped_call for that. In fact, untyped_call
1698 only needs to save the return registers in the given block. */
1699 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1700 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 gcc_assert (!valreg); /* have_untyped_call required. */
1704 valreg = gen_rtx_REG (mode, regno);
1707 emit_insn (targetm.gen_call_value (valreg,
1708 gen_rtx_MEM (FUNCTION_MODE, function),
1709 const0_rtx, NULL_RTX, const0_rtx));
1711 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 else
1714 gcc_unreachable ();
1716 /* Find the CALL insn we just emitted, and attach the register usage
1717 information. */
1718 call_insn = last_call_insn ();
1719 add_function_usage_to (call_insn, call_fusage);
1721 /* Restore the stack. */
1722 if (targetm.have_save_stack_nonlocal ())
1723 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1724 else
1725 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1726 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1728 OK_DEFER_POP;
1730 /* Return the address of the result block. */
1731 result = copy_addr_to_reg (XEXP (result, 0));
1732 return convert_memory_address (ptr_mode, result);
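/* As an illustration of this machinery, a generic forwarder can be
   written as (target_fn here is an arbitrary example callee):

     void *forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   where 64 is a caller-chosen upper bound on the size of the argument
   block; expand_builtin_apply above copies that block and re-issues
   the call untyped.  */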
1735 /* Perform an untyped return. */
1737 static void
1738 expand_builtin_return (rtx result)
1740 int size, align, regno;
1741 fixed_size_mode mode;
1742 rtx reg;
1743 rtx_insn *call_fusage = 0;
1745 result = convert_memory_address (Pmode, result);
1747 apply_result_size ();
1748 result = gen_rtx_MEM (BLKmode, result);
1750 if (targetm.have_untyped_return ())
1752 rtx vector = result_vector (0, result);
1753 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1754 emit_barrier ();
1755 return;
1758 /* Restore the return value and note that each value is used. */
1759 size = 0;
1760 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1761 if ((mode = apply_result_mode[regno]) != VOIDmode)
1763 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1764 if (size % align != 0)
1765 size = CEIL (size, align) * align;
1766 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1767 emit_move_insn (reg, adjust_address (result, mode, size));
1769 push_to_sequence (call_fusage);
1770 emit_use (reg);
1771 call_fusage = get_insns ();
1772 end_sequence ();
1773 size += GET_MODE_SIZE (mode);
1776 /* Put the USE insns before the return. */
1777 emit_insn (call_fusage);
1779 /* Return whatever values were restored by jumping directly to the end
1780 of the function. */
1781 expand_naked_return ();
1784 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1786 static enum type_class
1787 type_to_class (tree type)
1789 switch (TREE_CODE (type))
1791 case VOID_TYPE: return void_type_class;
1792 case INTEGER_TYPE: return integer_type_class;
1793 case ENUMERAL_TYPE: return enumeral_type_class;
1794 case BOOLEAN_TYPE: return boolean_type_class;
1795 case POINTER_TYPE: return pointer_type_class;
1796 case REFERENCE_TYPE: return reference_type_class;
1797 case OFFSET_TYPE: return offset_type_class;
1798 case REAL_TYPE: return real_type_class;
1799 case COMPLEX_TYPE: return complex_type_class;
1800 case FUNCTION_TYPE: return function_type_class;
1801 case METHOD_TYPE: return method_type_class;
1802 case RECORD_TYPE: return record_type_class;
1803 case UNION_TYPE:
1804 case QUAL_UNION_TYPE: return union_type_class;
1805 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1806 ? string_type_class : array_type_class);
1807 case LANG_TYPE: return lang_type_class;
1808 default: return no_type_class;
1812 /* Expand a call EXP to __builtin_classify_type. */
1814 static rtx
1815 expand_builtin_classify_type (tree exp)
1817 if (call_expr_nargs (exp))
1818 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1819 return GEN_INT (no_type_class);
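/* For example, __builtin_classify_type (1.0) evaluates to
   real_type_class, and in C __builtin_classify_type ("") evaluates to
   pointer_type_class because the argument decays to a pointer; the
   values are those of the type_class enumeration used by
   type_to_class above.  */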
1822 /* This helper macro, meant to be used in mathfn_built_in below, determines
1823 which among a set of builtin math functions is appropriate for a given type
1824 mode. The `F' (float) and `L' (long double) are automatically generated
1825 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1826 types, there are additional types that are considered with 'F32', 'F64',
1827 'F128', etc. suffixes. */
1828 #define CASE_MATHFN(MATHFN) \
1829 CASE_CFN_##MATHFN: \
1830 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1831 fcodel = BUILT_IN_##MATHFN##L ; break;
1832 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1833 types. */
1834 #define CASE_MATHFN_FLOATN(MATHFN) \
1835 CASE_CFN_##MATHFN: \
1836 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1837 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1838 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1839 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1840 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1841 break;
1842 /* Similar to above, but appends _R after any F/L suffix. */
1843 #define CASE_MATHFN_REENT(MATHFN) \
1844 case CFN_BUILT_IN_##MATHFN##_R: \
1845 case CFN_BUILT_IN_##MATHFN##F_R: \
1846 case CFN_BUILT_IN_##MATHFN##L_R: \
1847 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1848 fcodel = BUILT_IN_##MATHFN##L_R ; break;
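/* As an illustration, CASE_MATHFN (SQRT) expands to roughly:

     case CFN_SQRT: case CFN_BUILT_IN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   since CASE_CFN_SQRT (from the generated case-cfn-macros.h) covers
   both the internal-function and built-in variants of the combined
   function code.  */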
1850 /* Return a function equivalent to FN but operating on floating-point
1851 values of type TYPE, or END_BUILTINS if no such function exists.
1852 This is purely an operation on function codes; it does not guarantee
1853 that the target actually has an implementation of the function. */
1855 static built_in_function
1856 mathfn_built_in_2 (tree type, combined_fn fn)
1858 tree mtype;
1859 built_in_function fcode, fcodef, fcodel;
1860 built_in_function fcodef16 = END_BUILTINS;
1861 built_in_function fcodef32 = END_BUILTINS;
1862 built_in_function fcodef64 = END_BUILTINS;
1863 built_in_function fcodef128 = END_BUILTINS;
1864 built_in_function fcodef32x = END_BUILTINS;
1865 built_in_function fcodef64x = END_BUILTINS;
1866 built_in_function fcodef128x = END_BUILTINS;
1868 switch (fn)
1870 CASE_MATHFN (ACOS)
1871 CASE_MATHFN (ACOSH)
1872 CASE_MATHFN (ASIN)
1873 CASE_MATHFN (ASINH)
1874 CASE_MATHFN (ATAN)
1875 CASE_MATHFN (ATAN2)
1876 CASE_MATHFN (ATANH)
1877 CASE_MATHFN (CBRT)
1878 CASE_MATHFN_FLOATN (CEIL)
1879 CASE_MATHFN (CEXPI)
1880 CASE_MATHFN_FLOATN (COPYSIGN)
1881 CASE_MATHFN (COS)
1882 CASE_MATHFN (COSH)
1883 CASE_MATHFN (DREM)
1884 CASE_MATHFN (ERF)
1885 CASE_MATHFN (ERFC)
1886 CASE_MATHFN (EXP)
1887 CASE_MATHFN (EXP10)
1888 CASE_MATHFN (EXP2)
1889 CASE_MATHFN (EXPM1)
1890 CASE_MATHFN (FABS)
1891 CASE_MATHFN (FDIM)
1892 CASE_MATHFN_FLOATN (FLOOR)
1893 CASE_MATHFN_FLOATN (FMA)
1894 CASE_MATHFN_FLOATN (FMAX)
1895 CASE_MATHFN_FLOATN (FMIN)
1896 CASE_MATHFN (FMOD)
1897 CASE_MATHFN (FREXP)
1898 CASE_MATHFN (GAMMA)
1899 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1900 CASE_MATHFN (HUGE_VAL)
1901 CASE_MATHFN (HYPOT)
1902 CASE_MATHFN (ILOGB)
1903 CASE_MATHFN (ICEIL)
1904 CASE_MATHFN (IFLOOR)
1905 CASE_MATHFN (INF)
1906 CASE_MATHFN (IRINT)
1907 CASE_MATHFN (IROUND)
1908 CASE_MATHFN (ISINF)
1909 CASE_MATHFN (J0)
1910 CASE_MATHFN (J1)
1911 CASE_MATHFN (JN)
1912 CASE_MATHFN (LCEIL)
1913 CASE_MATHFN (LDEXP)
1914 CASE_MATHFN (LFLOOR)
1915 CASE_MATHFN (LGAMMA)
1916 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1917 CASE_MATHFN (LLCEIL)
1918 CASE_MATHFN (LLFLOOR)
1919 CASE_MATHFN (LLRINT)
1920 CASE_MATHFN (LLROUND)
1921 CASE_MATHFN (LOG)
1922 CASE_MATHFN (LOG10)
1923 CASE_MATHFN (LOG1P)
1924 CASE_MATHFN (LOG2)
1925 CASE_MATHFN (LOGB)
1926 CASE_MATHFN (LRINT)
1927 CASE_MATHFN (LROUND)
1928 CASE_MATHFN (MODF)
1929 CASE_MATHFN (NAN)
1930 CASE_MATHFN (NANS)
1931 CASE_MATHFN_FLOATN (NEARBYINT)
1932 CASE_MATHFN (NEXTAFTER)
1933 CASE_MATHFN (NEXTTOWARD)
1934 CASE_MATHFN (POW)
1935 CASE_MATHFN (POWI)
1936 CASE_MATHFN (POW10)
1937 CASE_MATHFN (REMAINDER)
1938 CASE_MATHFN (REMQUO)
1939 CASE_MATHFN_FLOATN (RINT)
1940 CASE_MATHFN_FLOATN (ROUND)
1941 CASE_MATHFN (SCALB)
1942 CASE_MATHFN (SCALBLN)
1943 CASE_MATHFN (SCALBN)
1944 CASE_MATHFN (SIGNBIT)
1945 CASE_MATHFN (SIGNIFICAND)
1946 CASE_MATHFN (SIN)
1947 CASE_MATHFN (SINCOS)
1948 CASE_MATHFN (SINH)
1949 CASE_MATHFN_FLOATN (SQRT)
1950 CASE_MATHFN (TAN)
1951 CASE_MATHFN (TANH)
1952 CASE_MATHFN (TGAMMA)
1953 CASE_MATHFN_FLOATN (TRUNC)
1954 CASE_MATHFN (Y0)
1955 CASE_MATHFN (Y1)
1956 CASE_MATHFN (YN)
1958 default:
1959 return END_BUILTINS;
1962 mtype = TYPE_MAIN_VARIANT (type);
1963 if (mtype == double_type_node)
1964 return fcode;
1965 else if (mtype == float_type_node)
1966 return fcodef;
1967 else if (mtype == long_double_type_node)
1968 return fcodel;
1969 else if (mtype == float16_type_node)
1970 return fcodef16;
1971 else if (mtype == float32_type_node)
1972 return fcodef32;
1973 else if (mtype == float64_type_node)
1974 return fcodef64;
1975 else if (mtype == float128_type_node)
1976 return fcodef128;
1977 else if (mtype == float32x_type_node)
1978 return fcodef32x;
1979 else if (mtype == float64x_type_node)
1980 return fcodef64x;
1981 else if (mtype == float128x_type_node)
1982 return fcodef128x;
1983 else
1984 return END_BUILTINS;
1987 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1988 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1989 otherwise use the explicit declaration. If we can't do the conversion,
1990 return null. */
1992 static tree
1993 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1995 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1996 if (fcode2 == END_BUILTINS)
1997 return NULL_TREE;
1999 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2000 return NULL_TREE;
2002 return builtin_decl_explicit (fcode2);
2005 /* Like mathfn_built_in_1, but always use the implicit array. */
2007 tree
2008 mathfn_built_in (tree type, combined_fn fn)
2010 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2013 /* Like mathfn_built_in_1, but take a built_in_function and
2014 always use the implicit array. */
2016 tree
2017 mathfn_built_in (tree type, enum built_in_function fn)
2019 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
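/* A typical use, e.g. when retargeting a double computation to float:

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the declaration of BUILT_IN_SQRTF, or NULL_TREE if no
   implicit declaration is available.  */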
2022 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2023 return its code, otherwise return IFN_LAST. Note that this function
2024 only tests whether the function is defined in internal-fn.def, not whether
2025 it is actually available on the target. */
2027 internal_fn
2028 associated_internal_fn (tree fndecl)
2030 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2031 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2032 switch (DECL_FUNCTION_CODE (fndecl))
2034 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2035 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2036 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2037 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2038 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2039 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2040 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2041 #include "internal-fn.def"
2043 CASE_FLT_FN (BUILT_IN_POW10):
2044 return IFN_EXP10;
2046 CASE_FLT_FN (BUILT_IN_DREM):
2047 return IFN_REMAINDER;
2049 CASE_FLT_FN (BUILT_IN_SCALBN):
2050 CASE_FLT_FN (BUILT_IN_SCALBLN):
2051 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2052 return IFN_LDEXP;
2053 return IFN_LAST;
2055 default:
2056 return IFN_LAST;
2060 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2061 on the current target by a call to an internal function, return the
2062 code of that internal function, otherwise return IFN_LAST. The caller
2063 is responsible for ensuring that any side-effects of the built-in
2064 call are dealt with correctly. E.g. if CALL sets errno, the caller
2065 must decide that the errno result isn't needed or make it available
2066 in some other way. */
2068 internal_fn
2069 replacement_internal_fn (gcall *call)
2071 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2073 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2074 if (ifn != IFN_LAST)
2076 tree_pair types = direct_internal_fn_types (ifn, call);
2077 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2078 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2079 return ifn;
2082 return IFN_LAST;
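/* For example, if the target implements sqrt_optab for DFmode, a
   GIMPLE call x = __builtin_sqrt (y) may be replaced by the
   internal-function call x = .SQRT (y), which expands directly to the
   optab and has no errno side-effect (which, per the comment above,
   the caller must have dealt with first).  */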
2085 /* Expand a call to the builtin ternary math functions (fma).
2086 Return NULL_RTX if a normal call should be emitted rather than expanding the
2087 function in-line. EXP is the expression that is a call to the builtin
2088 function; if convenient, the result should be placed in TARGET.
2089 SUBTARGET may be used as the target for computing one of EXP's
2090 operands. */
2092 static rtx
2093 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2095 optab builtin_optab;
2096 rtx op0, op1, op2, result;
2097 rtx_insn *insns;
2098 tree fndecl = get_callee_fndecl (exp);
2099 tree arg0, arg1, arg2;
2100 machine_mode mode;
2102 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2103 return NULL_RTX;
2105 arg0 = CALL_EXPR_ARG (exp, 0);
2106 arg1 = CALL_EXPR_ARG (exp, 1);
2107 arg2 = CALL_EXPR_ARG (exp, 2);
2109 switch (DECL_FUNCTION_CODE (fndecl))
2111 CASE_FLT_FN (BUILT_IN_FMA):
2112 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2113 builtin_optab = fma_optab; break;
2114 default:
2115 gcc_unreachable ();
2118 /* Make a suitable register to place result in. */
2119 mode = TYPE_MODE (TREE_TYPE (exp));
2121 /* Before working hard, check whether the instruction is available. */
2122 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2123 return NULL_RTX;
2125 result = gen_reg_rtx (mode);
2127 /* Always stabilize the argument list. */
2128 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2129 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2130 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2132 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2133 op1 = expand_normal (arg1);
2134 op2 = expand_normal (arg2);
2136 start_sequence ();
2138 /* Compute into RESULT.
2139 Set RESULT to wherever the result comes back. */
2140 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2141 result, 0);
2143 /* If we were unable to expand via the builtin, stop the sequence
2144 (without outputting the insns) and call the library function
2145 with the stabilized argument list. */
2146 if (result == 0)
2148 end_sequence ();
2149 return expand_call (exp, target, target == const0_rtx);
2152 /* Output the entire sequence. */
2153 insns = get_insns ();
2154 end_sequence ();
2155 emit_insn (insns);
2157 return result;
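/* For example, on a target that provides fma_optab for DFmode (one
   with a fused multiply-add instruction), a call

     double d = __builtin_fma (a, b, c);

   expands through the code above to a single fused a * b + c with one
   rounding step instead of a library call to fma.  */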
2160 /* Expand a call to the builtin sin and cos math functions.
2161 Return NULL_RTX if a normal call should be emitted rather than expanding the
2162 function in-line. EXP is the expression that is a call to the builtin
2163 function; if convenient, the result should be placed in TARGET.
2164 SUBTARGET may be used as the target for computing one of EXP's
2165 operands. */
2167 static rtx
2168 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2170 optab builtin_optab;
2171 rtx op0;
2172 rtx_insn *insns;
2173 tree fndecl = get_callee_fndecl (exp);
2174 machine_mode mode;
2175 tree arg;
2177 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2178 return NULL_RTX;
2180 arg = CALL_EXPR_ARG (exp, 0);
2182 switch (DECL_FUNCTION_CODE (fndecl))
2184 CASE_FLT_FN (BUILT_IN_SIN):
2185 CASE_FLT_FN (BUILT_IN_COS):
2186 builtin_optab = sincos_optab; break;
2187 default:
2188 gcc_unreachable ();
2191 /* Make a suitable register to place result in. */
2192 mode = TYPE_MODE (TREE_TYPE (exp));
2194 /* Check if the sincos insn is available; otherwise fall back
2195 to the sin or cos insn. */
2196 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2197 switch (DECL_FUNCTION_CODE (fndecl))
2199 CASE_FLT_FN (BUILT_IN_SIN):
2200 builtin_optab = sin_optab; break;
2201 CASE_FLT_FN (BUILT_IN_COS):
2202 builtin_optab = cos_optab; break;
2203 default:
2204 gcc_unreachable ();
2207 /* Before working hard, check whether the instruction is available. */
2208 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2210 rtx result = gen_reg_rtx (mode);
2212 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2213 need to expand the argument again. This way, we will not perform
2214 side-effects more than once. */
2215 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2217 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2219 start_sequence ();
2221 /* Compute into RESULT.
2222 Set RESULT to wherever the result comes back. */
2223 if (builtin_optab == sincos_optab)
2225 int ok;
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_SIN):
2230 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2231 break;
2232 CASE_FLT_FN (BUILT_IN_COS):
2233 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2234 break;
2235 default:
2236 gcc_unreachable ();
2238 gcc_assert (ok);
2240 else
2241 result = expand_unop (mode, builtin_optab, op0, result, 0);
2243 if (result != 0)
2245 /* Output the entire sequence. */
2246 insns = get_insns ();
2247 end_sequence ();
2248 emit_insn (insns);
2249 return result;
2252 /* If we were unable to expand via the builtin, stop the sequence
2253 (without outputting the insns) and call the library function
2254 with the stabilized argument list. */
2255 end_sequence ();
2258 return expand_call (exp, target, target == const0_rtx);
2261 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2262 return an RTL instruction code that implements the functionality.
2263 If that isn't possible or available return CODE_FOR_nothing. */
2265 static enum insn_code
2266 interclass_mathfn_icode (tree arg, tree fndecl)
2268 bool errno_set = false;
2269 optab builtin_optab = unknown_optab;
2270 machine_mode mode;
2272 switch (DECL_FUNCTION_CODE (fndecl))
2274 CASE_FLT_FN (BUILT_IN_ILOGB):
2275 errno_set = true; builtin_optab = ilogb_optab; break;
2276 CASE_FLT_FN (BUILT_IN_ISINF):
2277 builtin_optab = isinf_optab; break;
2278 case BUILT_IN_ISNORMAL:
2279 case BUILT_IN_ISFINITE:
2280 CASE_FLT_FN (BUILT_IN_FINITE):
2281 case BUILT_IN_FINITED32:
2282 case BUILT_IN_FINITED64:
2283 case BUILT_IN_FINITED128:
2284 case BUILT_IN_ISINFD32:
2285 case BUILT_IN_ISINFD64:
2286 case BUILT_IN_ISINFD128:
2287 /* These builtins have no optabs (yet). */
2288 break;
2289 default:
2290 gcc_unreachable ();
2293 /* There's no easy way to detect the case we need to set EDOM. */
2294 if (flag_errno_math && errno_set)
2295 return CODE_FOR_nothing;
2297 /* Optab mode depends on the mode of the input argument. */
2298 mode = TYPE_MODE (TREE_TYPE (arg));
2300 if (builtin_optab)
2301 return optab_handler (builtin_optab, mode);
2302 return CODE_FOR_nothing;
2305 /* Expand a call to one of the builtin math functions that operate on
2306 a floating-point argument and produce an integer result (ilogb, isinf,
2307 isnan, etc.).
2308 Return 0 if a normal call should be emitted rather than expanding the
2309 function in-line. EXP is the expression that is a call to the builtin
2310 function; if convenient, the result should be placed in TARGET. */
2312 static rtx
2313 expand_builtin_interclass_mathfn (tree exp, rtx target)
2315 enum insn_code icode = CODE_FOR_nothing;
2316 rtx op0;
2317 tree fndecl = get_callee_fndecl (exp);
2318 machine_mode mode;
2319 tree arg;
2321 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2322 return NULL_RTX;
2324 arg = CALL_EXPR_ARG (exp, 0);
2325 icode = interclass_mathfn_icode (arg, fndecl);
2326 mode = TYPE_MODE (TREE_TYPE (arg));
2328 if (icode != CODE_FOR_nothing)
2330 struct expand_operand ops[1];
2331 rtx_insn *last = get_last_insn ();
2332 tree orig_arg = arg;
2334 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2335 need to expand the argument again. This way, we will not perform
2336 side-effects more than once. */
2337 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2339 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2341 if (mode != GET_MODE (op0))
2342 op0 = convert_to_mode (mode, op0, 0);
2344 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2345 if (maybe_legitimize_operands (icode, 0, 1, ops)
2346 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2347 return ops[0].value;
2349 delete_insns_since (last);
2350 CALL_EXPR_ARG (exp, 0) = orig_arg;
2353 return NULL_RTX;
2356 /* Expand a call to the builtin sincos math function.
2357 Return NULL_RTX if a normal call should be emitted rather than expanding the
2358 function in-line. EXP is the expression that is a call to the builtin
2359 function. */
2361 static rtx
2362 expand_builtin_sincos (tree exp)
2364 rtx op0, op1, op2, target1, target2;
2365 machine_mode mode;
2366 tree arg, sinp, cosp;
2367 int result;
2368 location_t loc = EXPR_LOCATION (exp);
2369 tree alias_type, alias_off;
2371 if (!validate_arglist (exp, REAL_TYPE,
2372 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2373 return NULL_RTX;
2375 arg = CALL_EXPR_ARG (exp, 0);
2376 sinp = CALL_EXPR_ARG (exp, 1);
2377 cosp = CALL_EXPR_ARG (exp, 2);
2379 /* Make a suitable register to place result in. */
2380 mode = TYPE_MODE (TREE_TYPE (arg));
2382 /* Check if sincos insn is available, otherwise emit the call. */
2383 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2384 return NULL_RTX;
2386 target1 = gen_reg_rtx (mode);
2387 target2 = gen_reg_rtx (mode);
2389 op0 = expand_normal (arg);
2390 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2391 alias_off = build_int_cst (alias_type, 0);
2392 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2393 sinp, alias_off));
2394 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2395 cosp, alias_off));
2397 /* Compute into target1 and target2.
2398 Set TARGET to wherever the result comes back. */
2399 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2400 gcc_assert (result);
2402 /* Move target1 and target2 to the memory locations indicated
2403 by op1 and op2. */
2404 emit_move_insn (op1, target1);
2405 emit_move_insn (op2, target2);
2407 return const0_rtx;
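/* With a sincos_optab handler, a source-level call such as

     double s, c;
     sincos (x, &s, &c);

   computes both results in one expansion; the two emit_move_insn
   calls above then store them through the user-supplied pointers.  */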
2410 /* Expand a call to the internal cexpi builtin to the sincos math function.
2411 EXP is the expression that is a call to the builtin function; if convenient,
2412 the result should be placed in TARGET. */
2414 static rtx
2415 expand_builtin_cexpi (tree exp, rtx target)
2417 tree fndecl = get_callee_fndecl (exp);
2418 tree arg, type;
2419 machine_mode mode;
2420 rtx op0, op1, op2;
2421 location_t loc = EXPR_LOCATION (exp);
2423 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2424 return NULL_RTX;
2426 arg = CALL_EXPR_ARG (exp, 0);
2427 type = TREE_TYPE (arg);
2428 mode = TYPE_MODE (TREE_TYPE (arg));
2430 /* Try expanding via a sincos optab, falling back to emitting a libcall
2431 to sincos or cexp. We are sure one of the two exists because cexpi
2432 is only generated from sincos or cexp, or when we know we have one of them. */
2433 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2435 op1 = gen_reg_rtx (mode);
2436 op2 = gen_reg_rtx (mode);
2438 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2440 /* Compute into op1 and op2. */
2441 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2443 else if (targetm.libc_has_function (function_sincos))
2445 tree call, fn = NULL_TREE;
2446 tree top1, top2;
2447 rtx op1a, op2a;
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2455 else
2456 gcc_unreachable ();
2458 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2459 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2460 op1a = copy_addr_to_reg (XEXP (op1, 0));
2461 op2a = copy_addr_to_reg (XEXP (op2, 0));
2462 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2463 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2465 /* Make sure not to fold the sincos call again. */
2466 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2467 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2468 call, 3, arg, top1, top2));
2470 else
2472 tree call, fn = NULL_TREE, narg;
2473 tree ctype = build_complex_type (type);
2475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2476 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2478 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2480 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2481 else
2482 gcc_unreachable ();
2484 /* If we don't have a decl for cexp create one. This is the
2485 friendliest fallback if the user calls __builtin_cexpi
2486 without full target C99 function support. */
2487 if (fn == NULL_TREE)
2489 tree fntype;
2490 const char *name = NULL;
2492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2493 name = "cexpf";
2494 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2495 name = "cexp";
2496 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2497 name = "cexpl";
2499 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2500 fn = build_fn_decl (name, fntype);
2503 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2504 build_real (type, dconst0), arg);
2506 /* Make sure not to fold the cexp call again. */
2507 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2508 return expand_expr (build_call_nary (ctype, call, 1, narg),
2509 target, VOIDmode, EXPAND_NORMAL);
2512 /* Now build the proper return type. */
2513 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2514 make_tree (TREE_TYPE (arg), op2),
2515 make_tree (TREE_TYPE (arg), op1)),
2516 target, VOIDmode, EXPAND_NORMAL);
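/* Note that __builtin_cexpi (x) computes cos (x) + i*sin (x), i.e.
   cexp (I*x), which is why the three strategies above (a sincos
   optab, a sincos libcall, or cexp on the complex value 0 + x*i) are
   interchangeable ways of expanding it.  */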
2519 /* Conveniently construct a function call expression. FNDECL names the
2520 function to be called, N is the number of arguments, and the "..."
2521 parameters are the argument expressions. Unlike build_call_expr
2522 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2524 static tree
2525 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2527 va_list ap;
2528 tree fntype = TREE_TYPE (fndecl);
2529 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2531 va_start (ap, n);
2532 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2533 va_end (ap);
2534 SET_EXPR_LOCATION (fn, loc);
2535 return fn;
2538 /* Expand a call to one of the builtin rounding functions gcc defines
2539 as an extension (lfloor and lceil). As these are gcc extensions we
2540 do not need to worry about setting errno to EDOM.
2541 If expanding via the optab fails, lower the expression to (int) floor (x).
2542 EXP is the expression that is a call to the builtin function;
2543 if convenient, the result should be placed in TARGET. */
2545 static rtx
2546 expand_builtin_int_roundingfn (tree exp, rtx target)
2548 convert_optab builtin_optab;
2549 rtx op0, tmp;
2550 rtx_insn *insns;
2551 tree fndecl = get_callee_fndecl (exp);
2552 enum built_in_function fallback_fn;
2553 tree fallback_fndecl;
2554 machine_mode mode;
2555 tree arg;
2557 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2558 gcc_unreachable ();
2560 arg = CALL_EXPR_ARG (exp, 0);
2562 switch (DECL_FUNCTION_CODE (fndecl))
2564 CASE_FLT_FN (BUILT_IN_ICEIL):
2565 CASE_FLT_FN (BUILT_IN_LCEIL):
2566 CASE_FLT_FN (BUILT_IN_LLCEIL):
2567 builtin_optab = lceil_optab;
2568 fallback_fn = BUILT_IN_CEIL;
2569 break;
2571 CASE_FLT_FN (BUILT_IN_IFLOOR):
2572 CASE_FLT_FN (BUILT_IN_LFLOOR):
2573 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2574 builtin_optab = lfloor_optab;
2575 fallback_fn = BUILT_IN_FLOOR;
2576 break;
2578 default:
2579 gcc_unreachable ();
2582 /* Make a suitable register to place result in. */
2583 mode = TYPE_MODE (TREE_TYPE (exp));
2585 target = gen_reg_rtx (mode);
2587 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2588 need to expand the argument again. This way, we will not perform
2589 side-effects more than once. */
2590 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2592 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2594 start_sequence ();
2596 /* Compute into TARGET. */
2597 if (expand_sfix_optab (target, op0, builtin_optab))
2599 /* Output the entire sequence. */
2600 insns = get_insns ();
2601 end_sequence ();
2602 emit_insn (insns);
2603 return target;
2606 /* If we were unable to expand via the builtin, stop the sequence
2607 (without outputting the insns). */
2608 end_sequence ();
2610 /* Fall back to floating point rounding optab. */
2611 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2613 /* For non-C99 targets we may end up without a fallback fndecl here
2614 if the user called __builtin_lfloor directly. In this case emit
2615 a call to the floor/ceil variants nevertheless. This should result
2616 in the best user experience for targets lacking full C99 support. */
2617 if (fallback_fndecl == NULL_TREE)
2619 tree fntype;
2620 const char *name = NULL;
2622 switch (DECL_FUNCTION_CODE (fndecl))
2624 case BUILT_IN_ICEIL:
2625 case BUILT_IN_LCEIL:
2626 case BUILT_IN_LLCEIL:
2627 name = "ceil";
2628 break;
2629 case BUILT_IN_ICEILF:
2630 case BUILT_IN_LCEILF:
2631 case BUILT_IN_LLCEILF:
2632 name = "ceilf";
2633 break;
2634 case BUILT_IN_ICEILL:
2635 case BUILT_IN_LCEILL:
2636 case BUILT_IN_LLCEILL:
2637 name = "ceill";
2638 break;
2639 case BUILT_IN_IFLOOR:
2640 case BUILT_IN_LFLOOR:
2641 case BUILT_IN_LLFLOOR:
2642 name = "floor";
2643 break;
2644 case BUILT_IN_IFLOORF:
2645 case BUILT_IN_LFLOORF:
2646 case BUILT_IN_LLFLOORF:
2647 name = "floorf";
2648 break;
2649 case BUILT_IN_IFLOORL:
2650 case BUILT_IN_LFLOORL:
2651 case BUILT_IN_LLFLOORL:
2652 name = "floorl";
2653 break;
2654 default:
2655 gcc_unreachable ();
2658 fntype = build_function_type_list (TREE_TYPE (arg),
2659 TREE_TYPE (arg), NULL_TREE);
2660 fallback_fndecl = build_fn_decl (name, fntype);
2663 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2665 tmp = expand_normal (exp);
2666 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2668 /* Truncate the result of floating point optab to integer
2669 via expand_fix (). */
2670 target = gen_reg_rtx (mode);
2671 expand_fix (target, tmp, 0);
2673 return target;
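/* Putting the pieces together: on a target without lfloor_optab, a
   call such as

     long l = __builtin_lfloor (x);

   is lowered as sketched above to roughly l = (long) floor (x), i.e.
   a call to floor (or to a locally built decl for it) followed by
   expand_fix to truncate the result to an integer.  */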
2676 /* Expand a call to one of the builtin math functions doing integer
2677 conversion (lrint).
2678 Return 0 if a normal call should be emitted rather than expanding the
2679 function in-line. EXP is the expression that is a call to the builtin
2680 function; if convenient, the result should be placed in TARGET. */
2682 static rtx
2683 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2685 convert_optab builtin_optab;
2686 rtx op0;
2687 rtx_insn *insns;
2688 tree fndecl = get_callee_fndecl (exp);
2689 tree arg;
2690 machine_mode mode;
2691 enum built_in_function fallback_fn = BUILT_IN_NONE;
2693 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2694 gcc_unreachable ();
2696 arg = CALL_EXPR_ARG (exp, 0);
2698 switch (DECL_FUNCTION_CODE (fndecl))
2700 CASE_FLT_FN (BUILT_IN_IRINT):
2701 fallback_fn = BUILT_IN_LRINT;
2702 gcc_fallthrough ();
2703 CASE_FLT_FN (BUILT_IN_LRINT):
2704 CASE_FLT_FN (BUILT_IN_LLRINT):
2705 builtin_optab = lrint_optab;
2706 break;
2708 CASE_FLT_FN (BUILT_IN_IROUND):
2709 fallback_fn = BUILT_IN_LROUND;
2710 gcc_fallthrough ();
2711 CASE_FLT_FN (BUILT_IN_LROUND):
2712 CASE_FLT_FN (BUILT_IN_LLROUND):
2713 builtin_optab = lround_optab;
2714 break;
2716 default:
2717 gcc_unreachable ();
2720 /* There's no easy way to detect the case we need to set EDOM. */
2721 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2722 return NULL_RTX;
2724 /* Make a suitable register to place result in. */
2725 mode = TYPE_MODE (TREE_TYPE (exp));
2727 /* Likewise; expand inline only when errno handling is not required. */
2728 if (!flag_errno_math)
2730 rtx result = gen_reg_rtx (mode);
2732 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2733 need to expand the argument again. This way, we will not perform
2734 side-effects more than once. */
2735 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2737 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2739 start_sequence ();
2741 if (expand_sfix_optab (result, op0, builtin_optab))
2743 /* Output the entire sequence. */
2744 insns = get_insns ();
2745 end_sequence ();
2746 emit_insn (insns);
2747 return result;
2750 /* If we were unable to expand via the builtin, stop the sequence
2751 (without outputting the insns) and call the library function
2752 with the stabilized argument list. */
2753 end_sequence ();
2756 if (fallback_fn != BUILT_IN_NONE)
2758 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2759 targets, (int) round (x) should never be transformed into
2760 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2761 a call to lround in the hope that the target provides at least some
2762 C99 functions. This should result in the best user experience for
2763 targets lacking full C99 support. */
2764 tree fallback_fndecl = mathfn_built_in_1
2765 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2767 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2768 fallback_fndecl, 1, arg);
2770 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2771 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2772 return convert_to_mode (mode, target, 0);
2775 return expand_call (exp, target, target == const0_rtx);
2778 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2779 a normal call should be emitted rather than expanding the function
2780 in-line. EXP is the expression that is a call to the builtin
2781 function; if convenient, the result should be placed in TARGET. */
2783 static rtx
2784 expand_builtin_powi (tree exp, rtx target)
2786 tree arg0, arg1;
2787 rtx op0, op1;
2788 machine_mode mode;
2789 machine_mode mode2;
2791 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2792 return NULL_RTX;
2794 arg0 = CALL_EXPR_ARG (exp, 0);
2795 arg1 = CALL_EXPR_ARG (exp, 1);
2796 mode = TYPE_MODE (TREE_TYPE (exp));
2798 /* Emit a libcall to libgcc. */
2800 /* Mode of the 2nd argument must match that of an int. */
2801 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2803 if (target == NULL_RTX)
2804 target = gen_reg_rtx (mode);
2806 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2807 if (GET_MODE (op0) != mode)
2808 op0 = convert_to_mode (mode, op0, 0);
2809 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2810 if (GET_MODE (op1) != mode2)
2811 op1 = convert_to_mode (mode2, op1, 0);
2813 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2814 target, LCT_CONST, mode,
2815 op0, mode, op1, mode2);
2817 return target;
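/* The optab libfunc used here is the libgcc helper for the argument
   mode; e.g. for DFmode

     double r = __builtin_powi (x, n);

   becomes a call to __powidf2 (x, n), which libgcc computes by
   repeated squaring.  */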
2820 /* Expand expression EXP which is a call to the strlen builtin. Return
2821 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2822 try to get the result in TARGET, if convenient. */
2824 static rtx
2825 expand_builtin_strlen (tree exp, rtx target,
2826 machine_mode target_mode)
2828 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2829 return NULL_RTX;
2831 struct expand_operand ops[4];
2832 rtx pat;
2833 tree len;
2834 tree src = CALL_EXPR_ARG (exp, 0);
2835 rtx src_reg;
2836 rtx_insn *before_strlen;
2837 machine_mode insn_mode;
2838 enum insn_code icode = CODE_FOR_nothing;
2839 unsigned int align;
2841 /* If the length can be computed at compile-time, return it. */
2842 len = c_strlen (src, 0);
2843 if (len)
2844 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2846 /* If the length can be computed at compile-time and is a constant
2847 integer, but there are side-effects in src, evaluate
2848 src for side-effects, then return len.
2849 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2850 can be optimized into: i++; x = 3; */
2851 len = c_strlen (src, 1);
2852 if (len && TREE_CODE (len) == INTEGER_CST)
2854 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2855 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2858 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2860 /* If SRC is not a pointer type, don't do this operation inline. */
2861 if (align == 0)
2862 return NULL_RTX;
2864 /* Bail out if we can't compute strlen in the right mode. */
2865 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2867 icode = optab_handler (strlen_optab, insn_mode);
2868 if (icode != CODE_FOR_nothing)
2869 break;
2871 if (insn_mode == VOIDmode)
2872 return NULL_RTX;
2874 /* Make a place to hold the source address. We will not expand
2875 the actual source until we are sure that the expansion will
2876 not fail -- there are trees that cannot be expanded twice. */
2877 src_reg = gen_reg_rtx (Pmode);
2879 /* Mark the beginning of the strlen sequence so we can emit the
2880 source operand later. */
2881 before_strlen = get_last_insn ();
2883 create_output_operand (&ops[0], target, insn_mode);
2884 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2885 create_integer_operand (&ops[2], 0);
2886 create_integer_operand (&ops[3], align);
2887 if (!maybe_expand_insn (icode, 4, ops))
2888 return NULL_RTX;
2890 /* Check to see if the argument was declared attribute nonstring
2891 and if so, issue a warning since at this point it's not known
2892 to be nul-terminated. */
2893 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2895 /* Now that we are assured of success, expand the source. */
2896 start_sequence ();
2897 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2898 if (pat != src_reg)
2900 #ifdef POINTERS_EXTEND_UNSIGNED
2901 if (GET_MODE (pat) != Pmode)
2902 pat = convert_to_mode (Pmode, pat,
2903 POINTERS_EXTEND_UNSIGNED);
2904 #endif
2905 emit_move_insn (src_reg, pat);
2907 pat = get_insns ();
2908 end_sequence ();
2910 if (before_strlen)
2911 emit_insn_after (pat, before_strlen);
2912 else
2913 emit_insn_before (pat, get_insns ());
2915 /* Return the value in the proper mode for this function. */
2916 if (GET_MODE (ops[0].value) == target_mode)
2917 target = ops[0].value;
2918 else if (target != 0)
2919 convert_move (target, ops[0].value, 0);
2920 else
2921 target = convert_to_mode (target_mode, ops[0].value, 0);
2923 return target;
2926 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2927 bytes from constant string DATA + OFFSET and return it as target
2928 constant. */
2930 static rtx
2931 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2932 scalar_int_mode mode)
2934 const char *str = (const char *) data;
2936 gcc_assert (offset >= 0
2937 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2938 <= strlen (str) + 1));
2940 return c_readstr (str + offset, mode);
2943 /* LEN specifies the length of the block in a memcpy/memset operation.
2944 Figure out its range and store it in MIN_SIZE/MAX_SIZE. In some
2945 cases we can make a very likely guess at the maximum size; when we
2946 can, we store it in PROBABLE_MAX_SIZE. */
2948 static void
2949 determine_block_size (tree len, rtx len_rtx,
2950 unsigned HOST_WIDE_INT *min_size,
2951 unsigned HOST_WIDE_INT *max_size,
2952 unsigned HOST_WIDE_INT *probable_max_size)
2954 if (CONST_INT_P (len_rtx))
2956 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2957 return;
2959 else
2961 wide_int min, max;
2962 enum value_range_type range_type = VR_UNDEFINED;
2964 /* Determine bounds from the type. */
2965 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2966 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2967 else
2968 *min_size = 0;
2969 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2970 *probable_max_size = *max_size
2971 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2972 else
2973 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2975 if (TREE_CODE (len) == SSA_NAME)
2976 range_type = get_range_info (len, &min, &max);
2977 if (range_type == VR_RANGE)
2979 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2980 *min_size = min.to_uhwi ();
2981 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2982 *probable_max_size = *max_size = max.to_uhwi ();
2984 else if (range_type == VR_ANTI_RANGE)
2986 /* An anti-range 0...N lets us determine the minimal size as N+1. */
2987 if (min == 0)
2989 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2990 *min_size = max.to_uhwi () + 1;
2992 /* Code like
2994 int n;
2995 if (n < 100)
2996 memcpy (a, b, n)
2998 produces an anti-range allowing negative values of N. We can
2999 still use that information to guess that N is not negative.
3001 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3002 *probable_max_size = min.to_uhwi () - 1;
3005 gcc_checking_assert (*max_size <=
3006 (unsigned HOST_WIDE_INT)
3007 GET_MODE_MASK (GET_MODE (len_rtx)));
3010 /* Try to verify that the sizes and lengths of the arguments to a string
3011 manipulation function given by EXP are within valid bounds and that
3012 the operation does not lead to buffer overflow or read past the end.
3013 Arguments other than EXP may be null. When non-null, the arguments
3014 have the following meaning:
3015 DST is the destination of a copy call or NULL otherwise.
3016 SRC is the source of a copy call or NULL otherwise.
3017 DSTWRITE is the number of bytes written into the destination obtained
3018 from the user-supplied size argument to the function (such as in
3019 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3020 MAXREAD is the user-supplied bound on the length of the source sequence
3021 (such as in strncat (DST, SRC, MAXREAD)). It specifies the upper limit on the number
3022 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3023 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3024 expression EXP is a string function call (as opposed to a memory call
3025 like memcpy). As an exception, SRCSTR can also be an integer denoting
3026 the precomputed size of the source string or object (for functions like
3027 memcpy).
3028 DSTSIZE is the size of the destination object specified by the last
3029 argument to the _chk builtins, typically resulting from the expansion
3030 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3031 DSTSIZE).
3033 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3034 SIZE_MAX.
3036 If the call is successfully verified as safe return true, otherwise
3037 return false. */
3039 static bool
3040 check_access (tree exp, tree, tree, tree dstwrite,
3041 tree maxread, tree srcstr, tree dstsize)
3043 int opt = OPT_Wstringop_overflow_;
3045 /* The size of the largest object is half the address space, or
3046 PTRDIFF_MAX. (This is way too permissive.) */
3047 tree maxobjsize = max_object_size ();
3049 /* Either the length of the source string for string functions or
3050 the size of the source object for raw memory functions. */
3051 tree slen = NULL_TREE;
3053 tree range[2] = { NULL_TREE, NULL_TREE };
3055 /* Set to true when the exact number of bytes written by a string
3056 function like strcpy is not known and the only thing that is
3057 known is that it must be at least one (for the terminating nul). */
3058 bool at_least_one = false;
3059 if (srcstr)
3061 /* SRCSTR is normally a pointer to string but as a special case
3062 it can be an integer denoting the length of a string. */
3063 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3065 /* Try to determine the range of lengths the source string
3066 refers to. If it can be determined and is less than
3067 the upper bound given by MAXREAD add one to it for
3068 the terminating nul. Otherwise, set it to one for
3069 the same reason, or to MAXREAD as appropriate. */
3070 get_range_strlen (srcstr, range);
3071 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3073 if (maxread && tree_int_cst_le (maxread, range[0]))
3074 range[0] = range[1] = maxread;
3075 else
3076 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3077 range[0], size_one_node);
3079 if (maxread && tree_int_cst_le (maxread, range[1]))
3080 range[1] = maxread;
3081 else if (!integer_all_onesp (range[1]))
3082 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3083 range[1], size_one_node);
3085 slen = range[0];
3087 else
3089 at_least_one = true;
3090 slen = size_one_node;
3093 else
3094 slen = srcstr;
3097 if (!dstwrite && !maxread)
3099 /* When the only available piece of data is the object size
3100 there is nothing to do. */
3101 if (!slen)
3102 return true;
3104 /* Otherwise, when the length of the source sequence is known
3105 (as with strlen), set DSTWRITE to it. */
3106 if (!range[0])
3107 dstwrite = slen;
3110 if (!dstsize)
3111 dstsize = maxobjsize;
3113 if (dstwrite)
3114 get_size_range (dstwrite, range);
3116 tree func = get_callee_fndecl (exp);
3118 /* First check the number of bytes to be written against the maximum
3119 object size. */
3120 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3122 location_t loc = tree_nonartificial_location (exp);
3123 loc = expansion_point_location_if_in_system_header (loc);
3125 if (range[0] == range[1])
3126 warning_at (loc, opt,
3127 "%K%qD specified size %E "
3128 "exceeds maximum object size %E",
3129 exp, func, range[0], maxobjsize);
3130 else
3131 warning_at (loc, opt,
3132 "%K%qD specified size between %E and %E "
3133 "exceeds maximum object size %E",
3134 exp, func,
3135 range[0], range[1], maxobjsize);
3136 return false;
3139 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3140 constant, and in range of unsigned HOST_WIDE_INT. */
3141 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3143 /* Next check the number of bytes to be written against the destination
3144 object size. */
3145 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3147 if (range[0]
3148 && ((tree_fits_uhwi_p (dstsize)
3149 && tree_int_cst_lt (dstsize, range[0]))
3150 || (tree_fits_uhwi_p (dstwrite)
3151 && tree_int_cst_lt (dstwrite, range[0]))))
3153 location_t loc = tree_nonartificial_location (exp);
3154 loc = expansion_point_location_if_in_system_header (loc);
3156 if (dstwrite == slen && at_least_one)
3158 /* This is a call to strcpy with a destination of 0 size
3159 and a source of unknown length. The call will write
3160 at least one byte past the end of the destination. */
3161 warning_at (loc, opt,
3162 "%K%qD writing %E or more bytes into a region "
3163 "of size %E overflows the destination",
3164 exp, func, range[0], dstsize);
3166 else if (tree_int_cst_equal (range[0], range[1]))
3167 warning_at (loc, opt,
3168 (integer_onep (range[0])
3169 ? G_("%K%qD writing %E byte into a region "
3170 "of size %E overflows the destination")
3171 : G_("%K%qD writing %E bytes into a region "
3172 "of size %E overflows the destination")),
3173 exp, func, range[0], dstsize);
3174 else if (tree_int_cst_sign_bit (range[1]))
3176 /* Avoid printing the upper bound if it's invalid. */
3177 warning_at (loc, opt,
3178 "%K%qD writing %E or more bytes into a region "
3179 "of size %E overflows the destination",
3180 exp, func, range[0], dstsize);
3182 else
3183 warning_at (loc, opt,
3184 "%K%qD writing between %E and %E bytes into "
3185 "a region of size %E overflows the destination",
3186 exp, func, range[0], range[1],
3187 dstsize);
3189 /* Return error when an overflow has been detected. */
3190 return false;
3194 /* Check the maximum length of the source sequence against the size
3195 of the destination object if known, or against the maximum size
3196 of an object. */
3197 if (maxread)
3199 get_size_range (maxread, range);
3201 /* Use the lower end for MAXREAD from now on. */
3202 if (range[0])
3203 maxread = range[0];
3205 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3207 location_t loc = tree_nonartificial_location (exp);
3208 loc = expansion_point_location_if_in_system_header (loc);
3210 if (tree_int_cst_lt (maxobjsize, range[0]))
3212 /* Warn about crazy big sizes first since that's more
3213 likely to be meaningful than saying that the bound
3214 is greater than the object size if both are big. */
3215 if (range[0] == range[1])
3216 warning_at (loc, opt,
3217 "%K%qD specified bound %E "
3218 "exceeds maximum object size %E",
3219 exp, func,
3220 range[0], maxobjsize);
3221 else
3222 warning_at (loc, opt,
3223 "%K%qD specified bound between %E and %E "
3224 "exceeds maximum object size %E",
3225 exp, func,
3226 range[0], range[1], maxobjsize);
3228 return false;
3231 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3233 if (tree_int_cst_equal (range[0], range[1]))
3234 warning_at (loc, opt,
3235 "%K%qD specified bound %E "
3236 "exceeds destination size %E",
3237 exp, func,
3238 range[0], dstsize);
3239 else
3240 warning_at (loc, opt,
3241 "%K%qD specified bound between %E and %E "
3242 "exceeds destination size %E",
3243 exp, func,
3244 range[0], range[1], dstsize);
3245 return false;
3250 /* Check for reading past the end of SRC. */
3251 if (slen
3252 && slen == srcstr
3253 && dstwrite && range[0]
3254 && tree_int_cst_lt (slen, range[0]))
3256 location_t loc = tree_nonartificial_location (exp);
3258 if (tree_int_cst_equal (range[0], range[1]))
3259 warning_at (loc, opt,
3260 (tree_int_cst_equal (range[0], integer_one_node)
3261 ? G_("%K%qD reading %E byte from a region of size %E")
3262 : G_("%K%qD reading %E bytes from a region of size %E")),
3263 exp, func, range[0], slen);
3264 else if (tree_int_cst_sign_bit (range[1]))
3266 /* Avoid printing the upper bound if it's invalid. */
3267 warning_at (loc, opt,
3268 "%K%qD reading %E or more bytes from a region "
3269 "of size %E",
3270 exp, func, range[0], slen);
3272 else
3273 warning_at (loc, opt,
3274 "%K%qD reading between %E and %E bytes from a region "
3275 "of size %E",
3276 exp, func, range[0], range[1], slen);
3277 return false;
3280 return true;
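/* A call this function diagnoses, assuming -Wstringop-overflow is
   enabled:

     char d[3];
     strcpy (d, "abcd");

   Here the source length is 4, one byte is added for the terminating
   nul, and the result exceeds DSTSIZE (3), so check_access warns
   about writing 5 bytes into a region of size 3 and returns false.  */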
3283 /* Helper to compute the size of the object referenced by the DEST
3284 expression which must have pointer type, using Object Size type
3285 OSTYPE (only the least significant 2 bits are used). Return
3286 an estimate of the size of the object if successful or NULL when
3287 the size cannot be determined. When the referenced object involves
3288 a non-constant offset in some range the returned value represents
3289 the largest size given the smallest non-negative offset in the
3290 range. The function is intended for diagnostics and should not
3291 be used to influence code generation or optimization. */
3293 tree
3294 compute_objsize (tree dest, int ostype)
3296 unsigned HOST_WIDE_INT size;
3298 /* Only the two least significant bits are meaningful. */
3299 ostype &= 3;
3301 if (compute_builtin_object_size (dest, ostype, &size))
3302 return build_int_cst (sizetype, size);
3304 if (TREE_CODE (dest) == SSA_NAME)
3306 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3307 if (!is_gimple_assign (stmt))
3308 return NULL_TREE;
3310 dest = gimple_assign_rhs1 (stmt);
3312 tree_code code = gimple_assign_rhs_code (stmt);
3313 if (code == POINTER_PLUS_EXPR)
3315 /* compute_builtin_object_size fails for addresses with
3316 non-constant offsets. Try to determine the range of
3317 such an offset here and use it to adjust the constant
3318 size. */
3319 tree off = gimple_assign_rhs2 (stmt);
3320 if (TREE_CODE (off) == SSA_NAME
3321 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3323 wide_int min, max;
3324 enum value_range_type rng = get_range_info (off, &min, &max);
3326 if (rng == VR_RANGE)
3328 if (tree size = compute_objsize (dest, ostype))
3330 wide_int wisiz = wi::to_wide (size);
3332 /* Ignore negative offsets for now. For others,
3333 use the lower bound as the most optimistic
3334 estimate of the (remaining) size. */
3335 if (wi::sign_mask (min))
3337 else if (wi::ltu_p (min, wisiz))
3338 return wide_int_to_tree (TREE_TYPE (size),
3339 wi::sub (wisiz, min));
3340 else
3341 return size_zero_node;
3346 else if (code != ADDR_EXPR)
3347 return NULL_TREE;
3350 /* Unless computing the largest size (for memcpy and other raw memory
3351 functions), try to determine the size of the object from its type. */
3352 if (!ostype)
3353 return NULL_TREE;
3355 if (TREE_CODE (dest) != ADDR_EXPR)
3356 return NULL_TREE;
3358 tree type = TREE_TYPE (dest);
3359 if (TREE_CODE (type) == POINTER_TYPE)
3360 type = TREE_TYPE (type);
3362 type = TYPE_MAIN_VARIANT (type);
3364 if (TREE_CODE (type) == ARRAY_TYPE
3365 && !array_at_struct_end_p (dest))
3367 /* Return the constant size unless it's zero (that's a zero-length
3368 array likely at the end of a struct). */
3369 tree size = TYPE_SIZE_UNIT (type);
3370 if (size && TREE_CODE (size) == INTEGER_CST
3371 && !integer_zerop (size))
3372 return size;
3375 return NULL_TREE;
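/* For example, given

     char a[8];
     char *p = a + i;

   with I known (e.g. from range information) to be in [2, 4],
   compute_objsize (p, 1) returns 6: the object size 8 minus the
   smallest non-negative offset 2, the most optimistic estimate of
   the space remaining past P.  */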
3378 /* Helper to determine and check the sizes of the source and the destination
3379 of __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3380 call expression, DEST is the destination argument, SRC is the source
3381 argument or null, and LEN is the number of bytes. Use Object Size type-0
3382 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3383 (no overflow or invalid sizes), false otherwise. */
3385 static bool
3386 check_memop_access (tree exp, tree dest, tree src, tree size)
3388 /* For functions like memset and memcpy that operate on raw memory
3389 try to determine the size of the largest source and destination
3390 object using type-0 Object Size regardless of the object size
3391 type specified by the option. */
3392 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3393 tree dstsize = compute_objsize (dest, 0);
3395 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3396 srcsize, dstsize);
3399 /* Validate memchr arguments without performing any expansion.
3400 Return NULL_RTX. */
3402 static rtx
3403 expand_builtin_memchr (tree exp, rtx)
3405 if (!validate_arglist (exp,
3406 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3407 return NULL_RTX;
3409 tree arg1 = CALL_EXPR_ARG (exp, 0);
3410 tree len = CALL_EXPR_ARG (exp, 2);
3412 /* Diagnose calls where the specified length exceeds the size
3413 of the object. */
3414 if (warn_stringop_overflow)
3416 tree size = compute_objsize (arg1, 0);
3417 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3418 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3421 return NULL_RTX;
3424 /* Expand a call EXP to the memcpy builtin.
3425 Return NULL_RTX if we failed, the caller should emit a normal call,
3426 otherwise try to get the result in TARGET, if convenient (and in
3427 mode MODE if that's convenient). */
3429 static rtx
3430 expand_builtin_memcpy (tree exp, rtx target)
3432 if (!validate_arglist (exp,
3433 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3434 return NULL_RTX;
3436 tree dest = CALL_EXPR_ARG (exp, 0);
3437 tree src = CALL_EXPR_ARG (exp, 1);
3438 tree len = CALL_EXPR_ARG (exp, 2);
3440 check_memop_access (exp, dest, src, len);
3442 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3443 /*endp=*/ 0);
3446 /* Check a call EXP to the memmove built-in for validity.
3447 Return NULL_RTX on both success and failure. */
3449 static rtx
3450 expand_builtin_memmove (tree exp, rtx)
3452 if (!validate_arglist (exp,
3453 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3454 return NULL_RTX;
3456 tree dest = CALL_EXPR_ARG (exp, 0);
3457 tree src = CALL_EXPR_ARG (exp, 1);
3458 tree len = CALL_EXPR_ARG (exp, 2);
3460 check_memop_access (exp, dest, src, len);
3462 return NULL_RTX;
3465 /* Expand an instrumented call EXP to the memcpy builtin.
3466 Return NULL_RTX if we failed, the caller should emit a normal call,
3467 otherwise try to get the result in TARGET, if convenient (and in
3468 mode MODE if that's convenient). */
3470 static rtx
3471 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3473 if (!validate_arglist (exp,
3474 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3475 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3476 INTEGER_TYPE, VOID_TYPE))
3477 return NULL_RTX;
3478 else
3480 tree dest = CALL_EXPR_ARG (exp, 0);
3481 tree src = CALL_EXPR_ARG (exp, 2);
3482 tree len = CALL_EXPR_ARG (exp, 4);
3483 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3484 /*endp=*/ 0);
3486 /* Return src bounds with the result. */
3487 if (res)
3489 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3490 expand_normal (CALL_EXPR_ARG (exp, 1)));
3491 res = chkp_join_splitted_slot (res, bnd);
3493 return res;
3497 /* Expand a call EXP to the mempcpy builtin.
3498 Return NULL_RTX if we failed; the caller should emit a normal call,
3499 otherwise try to get the result in TARGET, if convenient (and in
3500 mode MODE if that's convenient). If ENDP is 0 return the
3501 destination pointer, if ENDP is 1 return the end pointer ala
3502 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3503 stpcpy. */
3505 static rtx
3506 expand_builtin_mempcpy (tree exp, rtx target)
3508 if (!validate_arglist (exp,
3509 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3510 return NULL_RTX;
3512 tree dest = CALL_EXPR_ARG (exp, 0);
3513 tree src = CALL_EXPR_ARG (exp, 1);
3514 tree len = CALL_EXPR_ARG (exp, 2);
3516 /* Policy does not generally allow using compute_objsize (which
3517 is used internally by check_memop_access) to change code generation
3518 or drive optimization decisions.
3520 In this instance it is safe because the code we generate has
3521 the same semantics regardless of the return value of
3522 check_memop_access. Exactly the same amount of data is copied
3523 and the return value is exactly the same in both cases.
3525 Furthermore, check_memop_access always uses mode 0 for the call to
3526 compute_objsize, so the imprecise nature of compute_objsize is
3527 avoided. */
3529 /* Avoid expanding mempcpy into memcpy when the call is determined
3530 to overflow the buffer. This also prevents the same overflow
3531 from being diagnosed again when expanding memcpy. */
3532 if (!check_memop_access (exp, dest, src, len))
3533 return NULL_RTX;
3535 return expand_builtin_mempcpy_args (dest, src, len,
3536 target, exp, /*endp=*/ 1);
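/* A sketch of the ENDP convention in user-visible terms (illustrative,
   assuming a hypothetical 8-byte buffer):

     char buf[8];
     void *p = memcpy  (buf, "abc", 4);   // ENDP == 0: p == buf
     void *q = mempcpy (buf, "abc", 4);   // ENDP == 1: q == buf + 4
     char *r = stpcpy  (buf, "abc");      // ENDP == 2: r == buf + 3,
                                          //  i.e. at the terminating nul
*/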
3539 /* Expand an instrumented call EXP to the mempcpy builtin.
3540 Return NULL_RTX if we failed; the caller should emit a normal call,
3541 otherwise try to get the result in TARGET, if convenient (and in
3542 mode MODE if that's convenient). */
3544 static rtx
3545 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3547 if (!validate_arglist (exp,
3548 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3549 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3550 INTEGER_TYPE, VOID_TYPE))
3551 return NULL_RTX;
3552 else
3554 tree dest = CALL_EXPR_ARG (exp, 0);
3555 tree src = CALL_EXPR_ARG (exp, 2);
3556 tree len = CALL_EXPR_ARG (exp, 4);
3557 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3558 exp, 1);
3560 /* Return src bounds with the result. */
3561 if (res)
3563 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3564 expand_normal (CALL_EXPR_ARG (exp, 1)));
3565 res = chkp_join_splitted_slot (res, bnd);
3567 return res;
3571 /* Helper function to do the actual work of expanding the memory copy family
3572 functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN bytes
3573 of memory from SRC to DEST and place the result in TARGET if convenient.
3574 If ENDP is 0 return the
3575 destination pointer, if ENDP is 1 return the end pointer ala
3576 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3577 stpcpy. */
3579 static rtx
3580 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3581 rtx target, tree exp, int endp)
3583 const char *src_str;
3584 unsigned int src_align = get_pointer_alignment (src);
3585 unsigned int dest_align = get_pointer_alignment (dest);
3586 rtx dest_mem, src_mem, dest_addr, len_rtx;
3587 HOST_WIDE_INT expected_size = -1;
3588 unsigned int expected_align = 0;
3589 unsigned HOST_WIDE_INT min_size;
3590 unsigned HOST_WIDE_INT max_size;
3591 unsigned HOST_WIDE_INT probable_max_size;
3593 /* If DEST is not a pointer type, call the normal function. */
3594 if (dest_align == 0)
3595 return NULL_RTX;
3597 /* If SRC is not a pointer type, don't do this
3598 operation in-line. */
3599 if (src_align == 0)
3600 return NULL_RTX;
3602 if (currently_expanding_gimple_stmt)
3603 stringop_block_profile (currently_expanding_gimple_stmt,
3604 &expected_align, &expected_size);
3606 if (expected_align < dest_align)
3607 expected_align = dest_align;
3608 dest_mem = get_memory_rtx (dest, len);
3609 set_mem_align (dest_mem, dest_align);
3610 len_rtx = expand_normal (len);
3611 determine_block_size (len, len_rtx, &min_size, &max_size,
3612 &probable_max_size);
3613 src_str = c_getstr (src);
3615 /* If SRC is a string constant and block move would be done
3616 by pieces, we can avoid loading the string from memory
3617 and instead store only the computed constants. */
3618 if (src_str
3619 && CONST_INT_P (len_rtx)
3620 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3621 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3622 CONST_CAST (char *, src_str),
3623 dest_align, false))
3625 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3626 builtin_memcpy_read_str,
3627 CONST_CAST (char *, src_str),
3628 dest_align, false, endp);
3629 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3630 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3631 return dest_mem;
3634 src_mem = get_memory_rtx (src, len);
3635 set_mem_align (src_mem, src_align);
3637 /* Copy word part most expediently. */
3638 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3639 CALL_EXPR_TAILCALL (exp)
3640 && (endp == 0 || target == const0_rtx)
3641 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3642 expected_align, expected_size,
3643 min_size, max_size, probable_max_size);
3645 if (dest_addr == 0)
3647 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3648 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3651 if (endp && target != const0_rtx)
3653 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3654 /* For stpcpy, return a pointer to the last byte, i.e. the nul. */
3655 if (endp == 2)
3656 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3659 return dest_addr;
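/* Example of the store-by-pieces path above (assumed user code): for

     char d[4];
     memcpy (d, "hi", 3);

   SRC is a string constant and LEN is a small constant no larger than
   strlen + 1, so the copy is emitted as immediate stores of the known
   bytes; the literal is never loaded from memory at run time.  */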
3662 static rtx
3663 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3664 rtx target, tree orig_exp, int endp)
3666 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3667 endp);
3670 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3671 we failed, the caller should emit a normal call, otherwise try to
3672 get the result in TARGET, if convenient. If ENDP is 0 return the
3673 destination pointer, if ENDP is 1 return the end pointer ala
3674 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3675 stpcpy. */
3677 static rtx
3678 expand_movstr (tree dest, tree src, rtx target, int endp)
3680 struct expand_operand ops[3];
3681 rtx dest_mem;
3682 rtx src_mem;
3684 if (!targetm.have_movstr ())
3685 return NULL_RTX;
3687 dest_mem = get_memory_rtx (dest, NULL);
3688 src_mem = get_memory_rtx (src, NULL);
3689 if (!endp)
3691 target = force_reg (Pmode, XEXP (dest_mem, 0));
3692 dest_mem = replace_equiv_address (dest_mem, target);
3695 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3696 create_fixed_operand (&ops[1], dest_mem);
3697 create_fixed_operand (&ops[2], src_mem);
3698 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3699 return NULL_RTX;
3701 if (endp && target != const0_rtx)
3703 target = ops[0].value;
3704 /* movstr is supposed to set end to the address of the NUL
3705 terminator. If the caller requested a mempcpy-like return value,
3706 adjust it. */
3707 if (endp == 1)
3709 rtx tem = plus_constant (GET_MODE (target),
3710 gen_lowpart (GET_MODE (target), target), 1);
3711 emit_move_insn (target, force_operand (tem, NULL_RTX));
3714 return target;
3717 /* Do some very basic size validation of a call to the strcat builtin
3718 given by EXP. Return NULL_RTX to have the built-in expand to a call
3719 to the library function. */
3721 static rtx
3722 expand_builtin_strcat (tree exp, rtx)
3724 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3725 || !warn_stringop_overflow)
3726 return NULL_RTX;
3728 tree dest = CALL_EXPR_ARG (exp, 0);
3729 tree src = CALL_EXPR_ARG (exp, 1);
3731 /* There is no way here to determine the length of the string in
3732 the destination to which the SRC string is being appended, so
3733 just diagnose cases when the source string is longer than
3734 the destination object. */
3736 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3738 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3739 destsize);
3741 return NULL_RTX;
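/* Diagnosed pattern (illustrative, assumed user code): with

     char d[4];
     strcat (d, "long suffix");

   the source is longer than the destination object, so check_access
   reports the overflow even though the destination's current length
   is unknown here.  */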
3744 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3745 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3746 try to get the result in TARGET, if convenient (and in mode MODE if that's
3747 convenient). */
3749 static rtx
3750 expand_builtin_strcpy (tree exp, rtx target)
3752 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3753 return NULL_RTX;
3755 tree dest = CALL_EXPR_ARG (exp, 0);
3756 tree src = CALL_EXPR_ARG (exp, 1);
3758 if (warn_stringop_overflow)
3760 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3761 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3762 src, destsize);
3765 return expand_builtin_strcpy_args (dest, src, target);
3768 /* Helper function to do the actual work for expand_builtin_strcpy. The
3769 arguments to the builtin_strcpy call DEST and SRC are broken out
3770 so that this can also be called without constructing an actual CALL_EXPR.
3771 The other arguments and return value are the same as for
3772 expand_builtin_strcpy. */
3774 static rtx
3775 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3777 return expand_movstr (dest, src, target, /*endp=*/0);
3780 /* Expand a call EXP to the stpcpy builtin.
3781 Return NULL_RTX if we failed; the caller should emit a normal call,
3782 otherwise try to get the result in TARGET, if convenient (and in
3783 mode MODE if that's convenient). */
3785 static rtx
3786 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3788 tree dst, src;
3789 location_t loc = EXPR_LOCATION (exp);
3791 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3792 return NULL_RTX;
3794 dst = CALL_EXPR_ARG (exp, 0);
3795 src = CALL_EXPR_ARG (exp, 1);
3797 if (warn_stringop_overflow)
3799 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3800 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3801 src, destsize);
3804 /* If return value is ignored, transform stpcpy into strcpy. */
3805 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3807 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3808 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3809 return expand_expr (result, target, mode, EXPAND_NORMAL);
3811 else
3813 tree len, lenp1;
3814 rtx ret;
3816 /* Ensure we get an actual string whose length can be evaluated at
3817 compile-time, not an expression containing a string. This is
3818 because the latter will potentially produce pessimized code
3819 when used to produce the return value. */
3820 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3821 return expand_movstr (dst, src, target, /*endp=*/2);
3823 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3824 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3825 target, exp, /*endp=*/2);
3827 if (ret)
3828 return ret;
3830 if (TREE_CODE (len) == INTEGER_CST)
3832 rtx len_rtx = expand_normal (len);
3834 if (CONST_INT_P (len_rtx))
3836 ret = expand_builtin_strcpy_args (dst, src, target);
3838 if (ret)
3840 if (! target)
3842 if (mode != VOIDmode)
3843 target = gen_reg_rtx (mode);
3844 else
3845 target = gen_reg_rtx (GET_MODE (ret));
3847 if (GET_MODE (target) != GET_MODE (ret))
3848 ret = gen_lowpart (GET_MODE (target), ret);
3850 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3851 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3852 gcc_assert (ret);
3854 return target;
3859 return expand_movstr (dst, src, target, /*endp=*/2);
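/* The result-ignored transformation above, in user terms (a sketch):

     stpcpy (d, s);         // return value unused
       ==> strcpy (d, s);   // same effect, no end pointer computed

   When SRC is instead a known constant string of length N, the call
   is expanded as mempcpy (d, s, N + 1) with ENDP == 2.  */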
3863 /* Check a call EXP to the stpncpy built-in for validity.
3864 Return NULL_RTX on both success and failure. */
3866 static rtx
3867 expand_builtin_stpncpy (tree exp, rtx)
3869 if (!validate_arglist (exp,
3870 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3871 || !warn_stringop_overflow)
3872 return NULL_RTX;
3874 /* The source and destination of the call. */
3875 tree dest = CALL_EXPR_ARG (exp, 0);
3876 tree src = CALL_EXPR_ARG (exp, 1);
3878 /* The exact number of bytes to write (not the maximum). */
3879 tree len = CALL_EXPR_ARG (exp, 2);
3881 /* The size of the destination object. */
3882 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3884 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3886 return NULL_RTX;
3889 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3890 bytes from constant string DATA + OFFSET and return it as target
3891 constant. */
3893 static rtx
3894 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3895 scalar_int_mode mode)
3897 const char *str = (const char *) data;
3899 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3900 return const0_rtx;
3902 return c_readstr (str + offset, mode);
3905 /* Helper to check the sizes of sequences and the destination of calls
3906 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3907 success (no overflow or invalid sizes), false otherwise. */
3909 static bool
3910 check_strncat_sizes (tree exp, tree objsize)
3912 tree dest = CALL_EXPR_ARG (exp, 0);
3913 tree src = CALL_EXPR_ARG (exp, 1);
3914 tree maxread = CALL_EXPR_ARG (exp, 2);
3916 /* Try to determine the range of lengths that the source expression
3917 refers to. */
3918 tree lenrange[2];
3919 get_range_strlen (src, lenrange);
3921 /* Try to verify that the destination is big enough for the shortest
3922 string. */
3924 if (!objsize && warn_stringop_overflow)
3926 /* If it hasn't been provided by __strncat_chk, try to determine
3927 the size of the destination object into which the source is
3928 being copied. */
3929 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3932 /* Add one for the terminating nul. */
3933 tree srclen = (lenrange[0]
3934 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3935 size_one_node)
3936 : NULL_TREE);
3938 /* The strncat function copies at most MAXREAD bytes and always appends
3939 the terminating nul, so the specified upper bound should never be equal
3940 to (or greater than) the size of the destination. */
3941 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3942 && tree_int_cst_equal (objsize, maxread))
3944 location_t loc = tree_nonartificial_location (exp);
3945 loc = expansion_point_location_if_in_system_header (loc);
3947 warning_at (loc, OPT_Wstringop_overflow_,
3948 "%K%qD specified bound %E equals destination size",
3949 exp, get_callee_fndecl (exp), maxread);
3951 return false;
3954 if (!srclen
3955 || (maxread && tree_fits_uhwi_p (maxread)
3956 && tree_fits_uhwi_p (srclen)
3957 && tree_int_cst_lt (maxread, srclen)))
3958 srclen = maxread;
3960 /* The number of bytes to write is LEN but check_access will also
3961 check SRCLEN if LEN's value isn't known. */
3962 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3963 objsize);
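/* The bound-equals-size warning above fires on calls like
   (illustrative, assumed user code):

     char d[8];
     strncat (d, s, sizeof d);   // bound 8 equals destination size;
                                 //  the appended nul can overflow `d'

   A safe bound would be sizeof d - strlen (d) - 1.  */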
3966 /* Similar to expand_builtin_strcat, do some very basic size validation
3967 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3968 the built-in expand to a call to the library function. */
3970 static rtx
3971 expand_builtin_strncat (tree exp, rtx)
3973 if (!validate_arglist (exp,
3974 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3975 || !warn_stringop_overflow)
3976 return NULL_RTX;
3978 tree dest = CALL_EXPR_ARG (exp, 0);
3979 tree src = CALL_EXPR_ARG (exp, 1);
3980 /* The upper bound on the number of bytes to write. */
3981 tree maxread = CALL_EXPR_ARG (exp, 2);
3982 /* The length of the source sequence. */
3983 tree slen = c_strlen (src, 1);
3985 /* Try to determine the range of lengths that the source expression
3986 refers to. */
3987 tree lenrange[2];
3988 if (slen)
3989 lenrange[0] = lenrange[1] = slen;
3990 else
3991 get_range_strlen (src, lenrange);
3993 /* Try to verify that the destination is big enough for the shortest
3994 string. First try to determine the size of the destination object
3995 into which the source is being copied. */
3996 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3998 /* Add one for the terminating nul. */
3999 tree srclen = (lenrange[0]
4000 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4001 size_one_node)
4002 : NULL_TREE);
4004 /* The strncat function copies at most MAXREAD bytes and always appends
4005 the terminating nul, so the specified upper bound should never be equal
4006 to (or greater than) the size of the destination. */
4007 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4008 && tree_int_cst_equal (destsize, maxread))
4010 location_t loc = tree_nonartificial_location (exp);
4011 loc = expansion_point_location_if_in_system_header (loc);
4013 warning_at (loc, OPT_Wstringop_overflow_,
4014 "%K%qD specified bound %E equals destination size",
4015 exp, get_callee_fndecl (exp), maxread);
4017 return NULL_RTX;
4020 if (!srclen
4021 || (maxread && tree_fits_uhwi_p (maxread)
4022 && tree_fits_uhwi_p (srclen)
4023 && tree_int_cst_lt (maxread, srclen)))
4024 srclen = maxread;
4026 /* The number of bytes to write is SRCLEN. */
4027 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4029 return NULL_RTX;
4032 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4033 NULL_RTX if we failed; the caller should emit a normal call. */
4035 static rtx
4036 expand_builtin_strncpy (tree exp, rtx target)
4038 location_t loc = EXPR_LOCATION (exp);
4040 if (validate_arglist (exp,
4041 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4043 tree dest = CALL_EXPR_ARG (exp, 0);
4044 tree src = CALL_EXPR_ARG (exp, 1);
4045 /* The number of bytes to write (not the maximum). */
4046 tree len = CALL_EXPR_ARG (exp, 2);
4047 /* The length of the source sequence. */
4048 tree slen = c_strlen (src, 1);
4050 if (warn_stringop_overflow)
4052 tree destsize = compute_objsize (dest,
4053 warn_stringop_overflow - 1);
4055 /* The number of bytes to write is LEN but check_access will also
4056 check SLEN if LEN's value isn't known. */
4057 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4058 destsize);
4061 /* We need a constant LEN and a SRC of known constant length. */
4062 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4063 return NULL_RTX;
4065 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4067 /* We're required to pad with trailing zeros if the requested
4068 len is greater than strlen(s2)+1. In that case try to
4069 use store_by_pieces; if it fails, punt.
4070 if (tree_int_cst_lt (slen, len))
4072 unsigned int dest_align = get_pointer_alignment (dest);
4073 const char *p = c_getstr (src);
4074 rtx dest_mem;
4076 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4077 || !can_store_by_pieces (tree_to_uhwi (len),
4078 builtin_strncpy_read_str,
4079 CONST_CAST (char *, p),
4080 dest_align, false))
4081 return NULL_RTX;
4083 dest_mem = get_memory_rtx (dest, len);
4084 store_by_pieces (dest_mem, tree_to_uhwi (len),
4085 builtin_strncpy_read_str,
4086 CONST_CAST (char *, p), dest_align, false, 0);
4087 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4088 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4089 return dest_mem;
4092 return NULL_RTX;
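/* Padding example for the store-by-pieces path above (assumed):

     char d[8];
     strncpy (d, "ab", 8);   // stores 'a', 'b', then six nul bytes

   Here SLEN + 1 == 3 < LEN == 8, so the expansion must supply the
   trailing zeros itself; builtin_strncpy_read_str returns const0_rtx
   for offsets past the end of the source string.  */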
4095 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4096 bytes from constant string DATA + OFFSET and return it as target
4097 constant. */
4099 static rtx
4100 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4101 scalar_int_mode mode)
4103 const char *c = (const char *) data;
4104 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4106 memset (p, *c, GET_MODE_SIZE (mode));
4108 return c_readstr (p, mode);
4111 /* Callback routine for store_by_pieces. Return the RTL of a register
4112 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4113 char value given in the RTL register data. For example, if mode is
4114 4 bytes wide, return the RTL for 0x01010101*data. */
4116 static rtx
4117 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4118 scalar_int_mode mode)
4120 rtx target, coeff;
4121 size_t size;
4122 char *p;
4124 size = GET_MODE_SIZE (mode);
4125 if (size == 1)
4126 return (rtx) data;
4128 p = XALLOCAVEC (char, size);
4129 memset (p, 1, size);
4130 coeff = c_readstr (p, mode);
4132 target = convert_to_mode (mode, (rtx) data, 1);
4133 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4134 return force_reg (mode, target);
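/* Replication sketch (illustrative arithmetic, not GCC code): for a
   4-byte mode and byte value 0x2a,

     0x2a * 0x01010101 == 0x2a2a2a2a

   which is exactly the multiply by the 0x01...01 coefficient emitted
   above.  */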
4137 /* Expand expression EXP, which is a call to the memset builtin. Return
4138 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4139 try to get the result in TARGET, if convenient (and in mode MODE if that's
4140 convenient). */
4142 static rtx
4143 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4145 if (!validate_arglist (exp,
4146 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4147 return NULL_RTX;
4149 tree dest = CALL_EXPR_ARG (exp, 0);
4150 tree val = CALL_EXPR_ARG (exp, 1);
4151 tree len = CALL_EXPR_ARG (exp, 2);
4153 check_memop_access (exp, dest, NULL_TREE, len);
4155 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4158 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4159 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4160 try to get the result in TARGET, if convenient (and in mode MODE if that's
4161 convenient). */
4163 static rtx
4164 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4166 if (!validate_arglist (exp,
4167 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4168 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4169 return NULL_RTX;
4170 else
4172 tree dest = CALL_EXPR_ARG (exp, 0);
4173 tree val = CALL_EXPR_ARG (exp, 2);
4174 tree len = CALL_EXPR_ARG (exp, 3);
4175 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4177 /* Return src bounds with the result. */
4178 if (res)
4180 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4181 expand_normal (CALL_EXPR_ARG (exp, 1)));
4182 res = chkp_join_splitted_slot (res, bnd);
4184 return res;
4188 /* Helper function to do the actual work for expand_builtin_memset. The
4189 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4190 so that this can also be called without constructing an actual CALL_EXPR.
4191 The other arguments and return value are the same as for
4192 expand_builtin_memset. */
4194 static rtx
4195 expand_builtin_memset_args (tree dest, tree val, tree len,
4196 rtx target, machine_mode mode, tree orig_exp)
4198 tree fndecl, fn;
4199 enum built_in_function fcode;
4200 machine_mode val_mode;
4201 char c;
4202 unsigned int dest_align;
4203 rtx dest_mem, dest_addr, len_rtx;
4204 HOST_WIDE_INT expected_size = -1;
4205 unsigned int expected_align = 0;
4206 unsigned HOST_WIDE_INT min_size;
4207 unsigned HOST_WIDE_INT max_size;
4208 unsigned HOST_WIDE_INT probable_max_size;
4210 dest_align = get_pointer_alignment (dest);
4212 /* If DEST is not a pointer type, don't do this operation in-line. */
4213 if (dest_align == 0)
4214 return NULL_RTX;
4216 if (currently_expanding_gimple_stmt)
4217 stringop_block_profile (currently_expanding_gimple_stmt,
4218 &expected_align, &expected_size);
4220 if (expected_align < dest_align)
4221 expected_align = dest_align;
4223 /* If the LEN parameter is zero, return DEST. */
4224 if (integer_zerop (len))
4226 /* Evaluate and ignore VAL in case it has side-effects. */
4227 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4228 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4231 /* Stabilize the arguments in case we fail. */
4232 dest = builtin_save_expr (dest);
4233 val = builtin_save_expr (val);
4234 len = builtin_save_expr (len);
4236 len_rtx = expand_normal (len);
4237 determine_block_size (len, len_rtx, &min_size, &max_size,
4238 &probable_max_size);
4239 dest_mem = get_memory_rtx (dest, len);
4240 val_mode = TYPE_MODE (unsigned_char_type_node);
4242 if (TREE_CODE (val) != INTEGER_CST)
4244 rtx val_rtx;
4246 val_rtx = expand_normal (val);
4247 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4249 /* Assume that we can memset by pieces if we can store
4250 the coefficients by pieces (in the required modes).
4251 We can't pass builtin_memset_gen_str as that emits RTL. */
4252 c = 1;
4253 if (tree_fits_uhwi_p (len)
4254 && can_store_by_pieces (tree_to_uhwi (len),
4255 builtin_memset_read_str, &c, dest_align,
4256 true))
4258 val_rtx = force_reg (val_mode, val_rtx);
4259 store_by_pieces (dest_mem, tree_to_uhwi (len),
4260 builtin_memset_gen_str, val_rtx, dest_align,
4261 true, 0);
4263 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4264 dest_align, expected_align,
4265 expected_size, min_size, max_size,
4266 probable_max_size))
4267 goto do_libcall;
4269 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4270 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4271 return dest_mem;
4274 if (target_char_cast (val, &c))
4275 goto do_libcall;
4277 if (c)
4279 if (tree_fits_uhwi_p (len)
4280 && can_store_by_pieces (tree_to_uhwi (len),
4281 builtin_memset_read_str, &c, dest_align,
4282 true))
4283 store_by_pieces (dest_mem, tree_to_uhwi (len),
4284 builtin_memset_read_str, &c, dest_align, true, 0);
4285 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4286 gen_int_mode (c, val_mode),
4287 dest_align, expected_align,
4288 expected_size, min_size, max_size,
4289 probable_max_size))
4290 goto do_libcall;
4292 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4293 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4294 return dest_mem;
4297 set_mem_align (dest_mem, dest_align);
4298 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4299 CALL_EXPR_TAILCALL (orig_exp)
4300 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4301 expected_align, expected_size,
4302 min_size, max_size,
4303 probable_max_size);
4305 if (dest_addr == 0)
4307 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4308 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4311 return dest_addr;
4313 do_libcall:
4314 fndecl = get_callee_fndecl (orig_exp);
4315 fcode = DECL_FUNCTION_CODE (fndecl);
4316 if (fcode == BUILT_IN_MEMSET
4317 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4318 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4319 dest, val, len);
4320 else if (fcode == BUILT_IN_BZERO)
4321 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4322 dest, len);
4323 else
4324 gcc_unreachable ();
4325 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4326 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4327 return expand_call (fn, target, target == const0_rtx);
4330 /* Expand expression EXP, which is a call to the bzero builtin. Return
4331 NULL_RTX if we failed; the caller should emit a normal call. */
4333 static rtx
4334 expand_builtin_bzero (tree exp)
4336 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4337 return NULL_RTX;
4339 tree dest = CALL_EXPR_ARG (exp, 0);
4340 tree size = CALL_EXPR_ARG (exp, 1);
4342 check_memop_access (exp, dest, NULL_TREE, size);
4344 /* New argument list transforming bzero(ptr x, int y) to
4345 memset(ptr x, int 0, size_t y). This is done this way
4346 so that if it isn't expanded inline, we fall back to
4347 calling bzero instead of memset. */
4349 location_t loc = EXPR_LOCATION (exp);
4351 return expand_builtin_memset_args (dest, integer_zero_node,
4352 fold_convert_loc (loc,
4353 size_type_node, size),
4354 const0_rtx, VOIDmode, exp);
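/* The rewrite above, in user terms (a sketch):

     bzero (p, n);   ==>   memset (p, 0, (size_t) n);

   expand_builtin_memset_args still receives the original bzero
   CALL_EXPR, so a failed inline expansion falls back to calling
   bzero rather than memset.  */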
4357 /* Try to expand cmpstr operation ICODE with the given operands.
4358 Return the result rtx on success, otherwise return null. */
4360 static rtx
4361 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4362 HOST_WIDE_INT align)
4364 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4366 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4367 target = NULL_RTX;
4369 struct expand_operand ops[4];
4370 create_output_operand (&ops[0], target, insn_mode);
4371 create_fixed_operand (&ops[1], arg1_rtx);
4372 create_fixed_operand (&ops[2], arg2_rtx);
4373 create_integer_operand (&ops[3], align);
4374 if (maybe_expand_insn (icode, 4, ops))
4375 return ops[0].value;
4376 return NULL_RTX;
4379 /* Expand expression EXP, which is a call to the memcmp built-in function.
4380 Return NULL_RTX if we failed and the caller should emit a normal call,
4381 otherwise try to get the result in TARGET, if convenient.
4382 RESULT_EQ is true if we can relax the returned value to be either zero
4383 or nonzero, without caring about the sign. */
4385 static rtx
4386 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4388 if (!validate_arglist (exp,
4389 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4390 return NULL_RTX;
4392 tree arg1 = CALL_EXPR_ARG (exp, 0);
4393 tree arg2 = CALL_EXPR_ARG (exp, 1);
4394 tree len = CALL_EXPR_ARG (exp, 2);
4396 /* Diagnose calls where the specified length exceeds the size of either
4397 object. */
4398 if (warn_stringop_overflow)
4400 tree size = compute_objsize (arg1, 0);
4401 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4402 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4404 size = compute_objsize (arg2, 0);
4405 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4406 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4410 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4411 location_t loc = EXPR_LOCATION (exp);
4413 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4414 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4416 /* If we don't have POINTER_TYPE for either argument, call the function. */
4417 if (arg1_align == 0 || arg2_align == 0)
4418 return NULL_RTX;
4420 rtx arg1_rtx = get_memory_rtx (arg1, len);
4421 rtx arg2_rtx = get_memory_rtx (arg2, len);
4422 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4424 /* Set MEM_SIZE as appropriate. */
4425 if (CONST_INT_P (len_rtx))
4427 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4428 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4431 by_pieces_constfn constfn = NULL;
4433 const char *src_str = c_getstr (arg2);
4434 if (result_eq && src_str == NULL)
4436 src_str = c_getstr (arg1);
4437 if (src_str != NULL)
4438 std::swap (arg1_rtx, arg2_rtx);
4441 /* If SRC is a string constant and the block comparison would be done
4442 by pieces, we can avoid loading the string from memory
4443 and instead use only the computed constants. */
4444 if (src_str
4445 && CONST_INT_P (len_rtx)
4446 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4447 constfn = builtin_memcpy_read_str;
4449 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4450 TREE_TYPE (len), target,
4451 result_eq, constfn,
4452 CONST_CAST (char *, src_str));
4454 if (result)
4456 /* Return the value in the proper mode for this function. */
4457 if (GET_MODE (result) == mode)
4458 return result;
4460 if (target != 0)
4462 convert_move (target, result, 0);
4463 return target;
4466 return convert_to_mode (mode, result, 0);
4469 return NULL_RTX;
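/* RESULT_EQ corresponds to uses where only zero/nonzero matters, e.g.
   (assumed user code):

     if (memcmp (a, b, n) == 0)
       ...

   which permits cheaper block-compare sequences than computing the
   full three-way ordering.  */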
4472 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4473 if we failed; the caller should emit a normal call, otherwise try to get
4474 the result in TARGET, if convenient. */
4476 static rtx
4477 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4479 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4480 return NULL_RTX;
4482 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4483 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4484 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4485 return NULL_RTX;
4487 tree arg1 = CALL_EXPR_ARG (exp, 0);
4488 tree arg2 = CALL_EXPR_ARG (exp, 1);
4490 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4491 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4493 /* If we don't have POINTER_TYPE for either argument, call the function. */
4494 if (arg1_align == 0 || arg2_align == 0)
4495 return NULL_RTX;
4497 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4498 arg1 = builtin_save_expr (arg1);
4499 arg2 = builtin_save_expr (arg2);
4501 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4502 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4504 rtx result = NULL_RTX;
4505 /* Try to call cmpstrsi. */
4506 if (cmpstr_icode != CODE_FOR_nothing)
4507 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4508 MIN (arg1_align, arg2_align));
4510 /* Try to determine at least one length and call cmpstrnsi. */
4511 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4513 tree len;
4514 rtx arg3_rtx;
4516 tree len1 = c_strlen (arg1, 1);
4517 tree len2 = c_strlen (arg2, 1);
4519 if (len1)
4520 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4521 if (len2)
4522 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4524 /* If we don't have a constant length for the first, use the length
4525 of the second, if we know it. We don't require a constant for
4526 this case; some cost analysis could be done if both are available
4527 but neither is constant. For now, assume they're equally cheap,
4528 unless one has side effects. If both strings have constant lengths,
4529 use the smaller. */
4531 if (!len1)
4532 len = len2;
4533 else if (!len2)
4534 len = len1;
4535 else if (TREE_SIDE_EFFECTS (len1))
4536 len = len2;
4537 else if (TREE_SIDE_EFFECTS (len2))
4538 len = len1;
4539 else if (TREE_CODE (len1) != INTEGER_CST)
4540 len = len2;
4541 else if (TREE_CODE (len2) != INTEGER_CST)
4542 len = len1;
4543 else if (tree_int_cst_lt (len1, len2))
4544 len = len1;
4545 else
4546 len = len2;
4548 /* If both arguments have side effects, we cannot optimize. */
4549 if (len && !TREE_SIDE_EFFECTS (len))
4551 arg3_rtx = expand_normal (len);
4552 result = expand_cmpstrn_or_cmpmem
4553 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4554 arg3_rtx, MIN (arg1_align, arg2_align));
4558 /* Check to see if the argument was declared with attribute nonstring
4559 and if so, issue a warning since at this point it's not known
4560 to be nul-terminated. */
4561 tree fndecl = get_callee_fndecl (exp);
4562 maybe_warn_nonstring_arg (fndecl, exp);
4564 if (result)
4566 /* Return the value in the proper mode for this function. */
4567 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4568 if (GET_MODE (result) == mode)
4569 return result;
4570 if (target == 0)
4571 return convert_to_mode (mode, result, 0);
4572 convert_move (target, result, 0);
4573 return target;
4576 /* Expand the library call ourselves using a stabilized argument
4577 list to avoid re-evaluating the function's arguments twice. */
4578 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4579 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4580 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4581 return expand_call (fn, target, target == const0_rtx);
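/* The nonstring warning above triggers on, e.g. (illustrative):

     char a[4] __attribute__ ((nonstring));
     strcmp (a, "ab");   // `a' may lack a terminating nul

   since strcmp requires nul-terminated arguments.  */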
4584 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4585 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4586 the result in TARGET, if convenient. */
4588 static rtx
4589 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4590 ATTRIBUTE_UNUSED machine_mode mode)
4592 if (!validate_arglist (exp,
4593 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4594 return NULL_RTX;
4596 /* If c_strlen can determine an expression for one of the string
4597 lengths, and it doesn't have side effects, then emit cmpstrnsi
4598 using length MIN(strlen(string)+1, arg3). */
4599 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4600 if (cmpstrn_icode == CODE_FOR_nothing)
4601 return NULL_RTX;
4603 tree len;
4605 tree arg1 = CALL_EXPR_ARG (exp, 0);
4606 tree arg2 = CALL_EXPR_ARG (exp, 1);
4607 tree arg3 = CALL_EXPR_ARG (exp, 2);
4609 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4610 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4612 tree len1 = c_strlen (arg1, 1);
4613 tree len2 = c_strlen (arg2, 1);
4615 location_t loc = EXPR_LOCATION (exp);
4617 if (len1)
4618 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4619 if (len2)
4620 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4622 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4624 /* If we don't have a constant length for the first, use the length
4625 of the second, if we know it. If neither string is constant length,
4626 use the given length argument. We don't require a constant for
4627 this case; some cost analysis could be done if both are available
4628 but neither is constant. For now, assume they're equally cheap,
4629 unless one has side effects. If both strings have constant lengths,
4630 use the smaller. */
4632 if (!len1 && !len2)
4633 len = len3;
4634 else if (!len1)
4635 len = len2;
4636 else if (!len2)
4637 len = len1;
4638 else if (TREE_SIDE_EFFECTS (len1))
4639 len = len2;
4640 else if (TREE_SIDE_EFFECTS (len2))
4641 len = len1;
4642 else if (TREE_CODE (len1) != INTEGER_CST)
4643 len = len2;
4644 else if (TREE_CODE (len2) != INTEGER_CST)
4645 len = len1;
4646 else if (tree_int_cst_lt (len1, len2))
4647 len = len1;
4648 else
4649 len = len2;
4651 /* If we are not using the given length, we must incorporate it here.
4652 The actual new length parameter will be MIN(len,arg3) in this case. */
4653 if (len != len3)
4654 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4655 rtx arg1_rtx = get_memory_rtx (arg1, len);
4656 rtx arg2_rtx = get_memory_rtx (arg2, len);
4657 rtx arg3_rtx = expand_normal (len);
4658 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4659 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4660 MIN (arg1_align, arg2_align));
4662 /* Check to see if the argument was declared with attribute nonstring
4663 and if so, issue a warning since at this point it's not known
4664 to be nul-terminated. */
4665 tree fndecl = get_callee_fndecl (exp);
4666 maybe_warn_nonstring_arg (fndecl, exp);
4668 if (result)
4670 /* Return the value in the proper mode for this function. */
4671 mode = TYPE_MODE (TREE_TYPE (exp));
4672 if (GET_MODE (result) == mode)
4673 return result;
4674 if (target == 0)
4675 return convert_to_mode (mode, result, 0);
4676 convert_move (target, result, 0);
4677 return target;
4680 /* Expand the library call ourselves using a stabilized argument
4681 list to avoid re-evaluating the function's arguments twice. */
4682 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4683 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4684 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4685 return expand_call (fn, target, target == const0_rtx);
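/* Length-selection sketch (illustrative): for

     strncmp ("abc", s, 100)

   LEN1 == strlen ("abc") + 1 == 4 while LEN2 is unknown, so the
   compare length becomes MIN (4, 100) == 4, which is what cmpstrnsi
   ultimately sees.  */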
4688 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4689 if that's convenient. */
4691 rtx
4692 expand_builtin_saveregs (void)
4694 rtx val;
4695 rtx_insn *seq;
4697 /* Don't do __builtin_saveregs more than once in a function.
4698 Save the result of the first call and reuse it. */
4699 if (saveregs_value != 0)
4700 return saveregs_value;
4702 /* When this function is called, it means that registers must be
4703 saved on entry to this function. So we migrate the call to the
4704 first insn of this function. */
4706 start_sequence ();
4708 /* Do whatever the machine needs done in this case. */
4709 val = targetm.calls.expand_builtin_saveregs ();
4711 seq = get_insns ();
4712 end_sequence ();
4714 saveregs_value = val;
4716 /* Put the insns after the NOTE that starts the function. If this
4717 is inside a start_sequence, make the outer-level insn chain current, so
4718 the code is placed at the start of the function. */
4719 push_topmost_sequence ();
4720 emit_insn_after (seq, entry_of_function ());
4721 pop_topmost_sequence ();
4723 return val;
4726 /* Expand a call to __builtin_next_arg. */
4728 static rtx
4729 expand_builtin_next_arg (void)
4731 /* Checking arguments is already done in fold_builtin_next_arg
4732 that must be called before this function. */
4733 return expand_binop (ptr_mode, add_optab,
4734 crtl->args.internal_arg_pointer,
4735 crtl->args.arg_offset_rtx,
4736 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4739 /* Make it easier for the backends by protecting the valist argument
4740 from multiple evaluations. */
4742 static tree
4743 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4745 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4747 /* The current way of determining the type of valist is completely
4748 bogus. We should have the information on the va builtin instead. */
4749 if (!vatype)
4750 vatype = targetm.fn_abi_va_list (cfun->decl);
4752 if (TREE_CODE (vatype) == ARRAY_TYPE)
4754 if (TREE_SIDE_EFFECTS (valist))
4755 valist = save_expr (valist);
4757 /* For this case, the backends will be expecting a pointer to
4758 vatype, but it's possible we've actually been given an array
4759 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4760 So fix it. */
4761 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4763 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4764 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4767 else
4769 tree pt = build_pointer_type (vatype);
4771 if (! needs_lvalue)
4773 if (! TREE_SIDE_EFFECTS (valist))
4774 return valist;
4776 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4777 TREE_SIDE_EFFECTS (valist) = 1;
4780 if (TREE_SIDE_EFFECTS (valist))
4781 valist = save_expr (valist);
4782 valist = fold_build2_loc (loc, MEM_REF,
4783 vatype, valist, build_int_cst (pt, 0));
4786 return valist;
4789 /* The "standard" definition of va_list is void*. */
4791 tree
4792 std_build_builtin_va_list (void)
4794 return ptr_type_node;
4797 /* The "standard" abi va_list is va_list_type_node. */
4799 tree
4800 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4802 return va_list_type_node;
4805 /* The "standard" type of va_list is va_list_type_node. */
4807 tree
4808 std_canonical_va_list_type (tree type)
4810 tree wtype, htype;
4812 wtype = va_list_type_node;
4813 htype = type;
4815 if (TREE_CODE (wtype) == ARRAY_TYPE)
4817 /* If va_list is an array type, the argument may have decayed
4818 to a pointer type, e.g. by being passed to another function.
4819 In that case, unwrap both types so that we can compare the
4820 underlying records. */
4821 if (TREE_CODE (htype) == ARRAY_TYPE
4822 || POINTER_TYPE_P (htype))
4824 wtype = TREE_TYPE (wtype);
4825 htype = TREE_TYPE (htype);
4828 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4829 return va_list_type_node;
4831 return NULL_TREE;
4834 /* The "standard" implementation of va_start: just assign `nextarg' to
4835 the variable. */
4837 void
4838 std_expand_builtin_va_start (tree valist, rtx nextarg)
4840 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4841 convert_move (va_r, nextarg, 0);
4843 /* We do not have any valid bounds for the pointer, so
4844 just store zero bounds for it. */
4845 if (chkp_function_instrumented_p (current_function_decl))
4846 chkp_expand_bounds_reset_for_mem (valist,
4847 make_tree (TREE_TYPE (valist),
4848 nextarg));
4851 /* Expand EXP, a call to __builtin_va_start. */
4853 static rtx
4854 expand_builtin_va_start (tree exp)
4856 rtx nextarg;
4857 tree valist;
4858 location_t loc = EXPR_LOCATION (exp);
4860 if (call_expr_nargs (exp) < 2)
4862 error_at (loc, "too few arguments to function %<va_start%>");
4863 return const0_rtx;
4866 if (fold_builtin_next_arg (exp, true))
4867 return const0_rtx;
4869 nextarg = expand_builtin_next_arg ();
4870 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4872 if (targetm.expand_builtin_va_start)
4873 targetm.expand_builtin_va_start (valist, nextarg);
4874 else
4875 std_expand_builtin_va_start (valist, nextarg);
4877 return const0_rtx;
4880 /* Expand EXP, a call to __builtin_va_end. */
4882 static rtx
4883 expand_builtin_va_end (tree exp)
4885 tree valist = CALL_EXPR_ARG (exp, 0);
4887 /* Evaluate for side effects, if needed. I hate macros that don't
4888 do that. */
4889 if (TREE_SIDE_EFFECTS (valist))
4890 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4892 return const0_rtx;
4895 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4896 builtin rather than just as an assignment in stdarg.h because of the
4897 nastiness of array-type va_list types. */
4899 static rtx
4900 expand_builtin_va_copy (tree exp)
4902 tree dst, src, t;
4903 location_t loc = EXPR_LOCATION (exp);
4905 dst = CALL_EXPR_ARG (exp, 0);
4906 src = CALL_EXPR_ARG (exp, 1);
4908 dst = stabilize_va_list_loc (loc, dst, 1);
4909 src = stabilize_va_list_loc (loc, src, 0);
4911 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4913 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4915 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4916 TREE_SIDE_EFFECTS (t) = 1;
4917 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4919 else
4921 rtx dstb, srcb, size;
4923 /* Evaluate to pointers. */
4924 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4925 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4926 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4927 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4929 dstb = convert_memory_address (Pmode, dstb);
4930 srcb = convert_memory_address (Pmode, srcb);
4932 /* "Dereference" to BLKmode memories. */
4933 dstb = gen_rtx_MEM (BLKmode, dstb);
4934 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4935 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4936 srcb = gen_rtx_MEM (BLKmode, srcb);
4937 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4938 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4940 /* Copy. */
4941 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4944 return const0_rtx;
4947 /* Expand a call to one of the builtin functions __builtin_frame_address or
4948 __builtin_return_address. */
4950 static rtx
4951 expand_builtin_frame_address (tree fndecl, tree exp)
4953 /* The argument must be a nonnegative integer constant.
4954 It counts the number of frames to scan up the stack.
4955 The value is either the frame pointer value or the return
4956 address saved in that frame. */
4957 if (call_expr_nargs (exp) == 0)
4958 /* Warning about missing arg was already issued. */
4959 return const0_rtx;
4960 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4962 error ("invalid argument to %qD", fndecl);
4963 return const0_rtx;
4965 else
4967 /* Number of frames to scan up the stack. */
4968 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4970 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4972 /* Some ports cannot access arbitrary stack frames. */
4973 if (tem == NULL)
4975 warning (0, "unsupported argument to %qD", fndecl);
4976 return const0_rtx;
4979 if (count)
4981 /* Warn since no effort is made to ensure that any frame
4982 beyond the current one exists or can be safely reached. */
4983 warning (OPT_Wframe_address, "calling %qD with "
4984 "a nonzero argument is unsafe", fndecl);
4987 /* For __builtin_frame_address, return what we've got. */
4988 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4989 return tem;
4991 if (!REG_P (tem)
4992 && ! CONSTANT_P (tem))
4993 tem = copy_addr_to_reg (tem);
4994 return tem;
4998 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4999 failed and the caller should emit a normal call. */
5001 static rtx
5002 expand_builtin_alloca (tree exp)
5004 rtx op0;
5005 rtx result;
5006 unsigned int align;
5007 tree fndecl = get_callee_fndecl (exp);
5008 HOST_WIDE_INT max_size;
5009 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5010 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5011 bool valid_arglist
5012 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5013 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5014 VOID_TYPE)
5015 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5016 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5017 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5019 if (!valid_arglist)
5020 return NULL_RTX;
5022 if ((alloca_for_var && !warn_vla_limit)
5023 || (!alloca_for_var && !warn_alloca_limit))
5025 /* -Walloca-larger-than and -Wvla-larger-than settings override
5026 the more general -Walloc-size-larger-than so unless either of
5027 the former options is specified check the alloca arguments for
5028 overflow. */
5029 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5030 int idx[] = { 0, -1 };
5031 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5034 /* Compute the argument. */
5035 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5037 /* Compute the alignment. */
5038 align = (fcode == BUILT_IN_ALLOCA
5039 ? BIGGEST_ALIGNMENT
5040 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5042 /* Compute the maximum size. */
5043 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5044 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5045 : -1);
5047 /* Allocate the desired space. If the allocation stems from the declaration
5048 of a variable-sized object, it cannot accumulate. */
5049 result
5050 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5051 result = convert_memory_address (ptr_mode, result);
5053 return result;
5056 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second argument
5057 of the call with virtual_stack_dynamic_rtx because in the asan pass we emit a
5058 dummy value for the second parameter, relying on this function to perform the
5059 change. See motivation for this in comment to handle_builtin_stack_restore
5060 function. */
5062 static rtx
5063 expand_asan_emit_allocas_unpoison (tree exp)
5065 tree arg0 = CALL_EXPR_ARG (exp, 0);
5066 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5067 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5068 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5069 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5070 top, ptr_mode, bot, ptr_mode);
5071 return ret;
5074 /* Expand a call to bswap builtin in EXP.
5075 Return NULL_RTX if a normal call should be emitted rather than expanding the
5076 function in-line. If convenient, the result should be placed in TARGET.
5077 SUBTARGET may be used as the target for computing one of EXP's operands. */
5079 static rtx
5080 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5081 rtx subtarget)
5083 tree arg;
5084 rtx op0;
5086 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5087 return NULL_RTX;
5089 arg = CALL_EXPR_ARG (exp, 0);
5090 op0 = expand_expr (arg,
5091 subtarget && GET_MODE (subtarget) == target_mode
5092 ? subtarget : NULL_RTX,
5093 target_mode, EXPAND_NORMAL);
5094 if (GET_MODE (op0) != target_mode)
5095 op0 = convert_to_mode (target_mode, op0, 1);
5097 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5099 gcc_assert (target);
5101 return convert_to_mode (target_mode, target, 1);
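/* e.g. (illustrative) __builtin_bswap32 (0x11223344) == 0x44332211;
   the bswap_optab expansion emits the target's byte-swap insn
   directly when one exists.  */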
5104 /* Expand a call to a unary builtin in EXP.
5105 Return NULL_RTX if a normal call should be emitted rather than expanding the
5106 function in-line. If convenient, the result should be placed in TARGET.
5107 SUBTARGET may be used as the target for computing one of EXP's operands. */
5109 static rtx
5110 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5111 rtx subtarget, optab op_optab)
5113 rtx op0;
5115 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5116 return NULL_RTX;
5118 /* Compute the argument. */
5119 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5120 (subtarget
5121 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5122 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5123 VOIDmode, EXPAND_NORMAL);
5124 /* Compute op, into TARGET if possible.
5125 Set TARGET to wherever the result comes back. */
5126 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5127 op_optab, op0, target, op_optab != clrsb_optab);
5128 gcc_assert (target);
5130 return convert_to_mode (target_mode, target, 0);
5133 /* Expand a call to __builtin_expect. We just return our argument
5134 as the builtin_expect semantics should already have been applied by
5135 the tree branch prediction pass. */
5137 static rtx
5138 expand_builtin_expect (tree exp, rtx target)
5140 tree arg;
5142 if (call_expr_nargs (exp) < 2)
5143 return const0_rtx;
5144 arg = CALL_EXPR_ARG (exp, 0);
5146 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5147 /* When guessing was done, the hints should be already stripped away. */
5148 gcc_assert (!flag_guess_branch_prob
5149 || optimize == 0 || seen_error ());
5150 return target;
5153 /* Expand a call to __builtin_assume_aligned. We just return our first
5154 argument, as the builtin_assume_aligned semantics should already
5155 have been applied by CCP. */
5157 static rtx
5158 expand_builtin_assume_aligned (tree exp, rtx target)
5160 if (call_expr_nargs (exp) < 2)
5161 return const0_rtx;
5162 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5163 EXPAND_NORMAL);
5164 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5165 && (call_expr_nargs (exp) < 3
5166 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5167 return target;
5170 void
5171 expand_builtin_trap (void)
5173 if (targetm.have_trap ())
5175 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5176 /* For trap insns when not accumulating outgoing args force
5177 REG_ARGS_SIZE note to prevent crossjumping of calls with
5178 different args sizes. */
5179 if (!ACCUMULATE_OUTGOING_ARGS)
5180 add_args_size_note (insn, stack_pointer_delta);
5182 else
5184 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5185 tree call_expr = build_call_expr (fn, 0);
5186 expand_call (call_expr, NULL_RTX, false);
5189 emit_barrier ();
5192 /* Expand a call to __builtin_unreachable. We do nothing except emit
5193 a barrier saying that control flow will not pass here.
5195 It is the responsibility of the program being compiled to ensure
5196 that control flow never reaches __builtin_unreachable. */
5197 static void
5198 expand_builtin_unreachable (void)
5200 emit_barrier ();
5203 /* Expand EXP, a call to fabs, fabsf or fabsl.
5204 Return NULL_RTX if a normal call should be emitted rather than expanding
5205 the function inline. If convenient, the result should be placed
5206 in TARGET. SUBTARGET may be used as the target for computing
5207 the operand. */
5209 static rtx
5210 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5212 machine_mode mode;
5213 tree arg;
5214 rtx op0;
5216 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5217 return NULL_RTX;
5219 arg = CALL_EXPR_ARG (exp, 0);
5220 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5221 mode = TYPE_MODE (TREE_TYPE (arg));
5222 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5223 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5226 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5227 Return NULL if a normal call should be emitted rather than expanding the
5228 function inline. If convenient, the result should be placed in TARGET.
5229 SUBTARGET may be used as the target for computing the operand. */
5231 static rtx
5232 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5234 rtx op0, op1;
5235 tree arg;
5237 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5238 return NULL_RTX;
5240 arg = CALL_EXPR_ARG (exp, 0);
5241 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5243 arg = CALL_EXPR_ARG (exp, 1);
5244 op1 = expand_normal (arg);
5246 return expand_copysign (op0, op1, target);
5249 /* Expand a call to __builtin___clear_cache. */
5251 static rtx
5252 expand_builtin___clear_cache (tree exp)
5254 if (!targetm.code_for_clear_cache)
5256 #ifdef CLEAR_INSN_CACHE
5257 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5258 does something. Just do the default expansion to a call to
5259 __clear_cache(). */
5260 return NULL_RTX;
5261 #else
5262 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5263 does nothing. There is no need to call it. Do nothing. */
5264 return const0_rtx;
5265 #endif /* CLEAR_INSN_CACHE */
5268 /* We have a "clear_cache" insn, and it will handle everything. */
5269 tree begin, end;
5270 rtx begin_rtx, end_rtx;
5272 /* We must not expand to a library call. If we did, any
5273 fallback library function in libgcc that might contain a call to
5274 __builtin___clear_cache() would recurse infinitely. */
5275 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5277 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5278 return const0_rtx;
5281 if (targetm.have_clear_cache ())
5283 struct expand_operand ops[2];
5285 begin = CALL_EXPR_ARG (exp, 0);
5286 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5288 end = CALL_EXPR_ARG (exp, 1);
5289 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5291 create_address_operand (&ops[0], begin_rtx);
5292 create_address_operand (&ops[1], end_rtx);
5293 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5294 return const0_rtx;
5296 return const0_rtx;
5299 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5301 static rtx
5302 round_trampoline_addr (rtx tramp)
5304 rtx temp, addend, mask;
5306 /* If we don't need too much alignment, we'll have been guaranteed
5307 proper alignment by get_trampoline_type. */
5308 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5309 return tramp;
5311 /* Round address up to desired boundary. */
5312 temp = gen_reg_rtx (Pmode);
5313 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5314 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5316 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5317 temp, 0, OPTAB_LIB_WIDEN);
5318 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5319 temp, 0, OPTAB_LIB_WIDEN);
5321 return tramp;
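/* Worked example (illustrative): with a TRAMPOLINE_ALIGNMENT of 64 bits,
   the addend is 7 and the mask is -8, so the two binops above compute

     tramp = (tramp + 7) & -8;

   the classic round-up idiom (addr + align - 1) & -align, emitted as
   explicit PLUS and AND operations in Pmode.  */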
5324 static rtx
5325 expand_builtin_init_trampoline (tree exp, bool onstack)
5327 tree t_tramp, t_func, t_chain;
5328 rtx m_tramp, r_tramp, r_chain, tmp;
5330 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5331 POINTER_TYPE, VOID_TYPE))
5332 return NULL_RTX;
5334 t_tramp = CALL_EXPR_ARG (exp, 0);
5335 t_func = CALL_EXPR_ARG (exp, 1);
5336 t_chain = CALL_EXPR_ARG (exp, 2);
5338 r_tramp = expand_normal (t_tramp);
5339 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5340 MEM_NOTRAP_P (m_tramp) = 1;
5342 /* If ONSTACK, the TRAMP argument should be the address of a field
5343 within the local function's FRAME decl. Either way, let's see if
5344 we can fill in the MEM_ATTRs for this memory. */
5345 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5346 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5348 /* Creator of a heap trampoline is responsible for making sure the
5349 address is aligned to at least STACK_BOUNDARY. Normally malloc
5350 will ensure this anyhow. */
5351 tmp = round_trampoline_addr (r_tramp);
5352 if (tmp != r_tramp)
5354 m_tramp = change_address (m_tramp, BLKmode, tmp);
5355 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5356 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5359 /* The FUNC argument should be the address of the nested function.
5360 Extract the actual function decl to pass to the hook. */
5361 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5362 t_func = TREE_OPERAND (t_func, 0);
5363 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5365 r_chain = expand_normal (t_chain);
5367 /* Generate insns to initialize the trampoline. */
5368 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5370 if (onstack)
5372 trampolines_created = 1;
5374 if (targetm.calls.custom_function_descriptors != 0)
5375 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5376 "trampoline generated for nested function %qD", t_func);
5379 return const0_rtx;
5382 static rtx
5383 expand_builtin_adjust_trampoline (tree exp)
5385 rtx tramp;
5387 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5388 return NULL_RTX;
5390 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5391 tramp = round_trampoline_addr (tramp);
5392 if (targetm.calls.trampoline_adjust_address)
5393 tramp = targetm.calls.trampoline_adjust_address (tramp);
5395 return tramp;
5398 /* Expand a call to the builtin descriptor initialization routine.
5399 A descriptor is made up of a couple of pointers to the static
5400 chain and the code entry in this order. */
5402 static rtx
5403 expand_builtin_init_descriptor (tree exp)
5405 tree t_descr, t_func, t_chain;
5406 rtx m_descr, r_descr, r_func, r_chain;
5408 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5409 VOID_TYPE))
5410 return NULL_RTX;
5412 t_descr = CALL_EXPR_ARG (exp, 0);
5413 t_func = CALL_EXPR_ARG (exp, 1);
5414 t_chain = CALL_EXPR_ARG (exp, 2);
5416 r_descr = expand_normal (t_descr);
5417 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5418 MEM_NOTRAP_P (m_descr) = 1;
5420 r_func = expand_normal (t_func);
5421 r_chain = expand_normal (t_chain);
5423 /* Generate insns to initialize the descriptor. */
5424 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5425 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5426 POINTER_SIZE / BITS_PER_UNIT), r_func);
5428 return const0_rtx;
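/* Sketch of the layout built above, assuming a 64-bit target with
   POINTER_SIZE of 64 (for illustration only):

     offset 0: static chain pointer  (r_chain)
     offset 8: code entry point      (r_func)

   i.e. two consecutive ptr_mode words, in that order.  */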
5431 /* Expand a call to the builtin descriptor adjustment routine. */
5433 static rtx
5434 expand_builtin_adjust_descriptor (tree exp)
5436 rtx tramp;
5438 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5439 return NULL_RTX;
5441 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5443 /* Unalign the descriptor to allow runtime identification. */
5444 tramp = plus_constant (ptr_mode, tramp,
5445 targetm.calls.custom_function_descriptors);
5447 return force_operand (tramp, NULL_RTX);
5450 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5451 function. The function first checks whether the back end provides
5452 an insn to implement signbit for the respective mode. If not, it
5453 checks whether the floating point format of the value is such that
5454 the sign bit can be extracted. If that is not the case, error out.
5455 EXP is the expression that is a call to the builtin function; if
5456 convenient, the result should be placed in TARGET. */
5457 static rtx
5458 expand_builtin_signbit (tree exp, rtx target)
5460 const struct real_format *fmt;
5461 scalar_float_mode fmode;
5462 scalar_int_mode rmode, imode;
5463 tree arg;
5464 int word, bitpos;
5465 enum insn_code icode;
5466 rtx temp;
5467 location_t loc = EXPR_LOCATION (exp);
5469 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5470 return NULL_RTX;
5472 arg = CALL_EXPR_ARG (exp, 0);
5473 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5474 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5475 fmt = REAL_MODE_FORMAT (fmode);
5477 arg = builtin_save_expr (arg);
5479 /* Expand the argument yielding a RTX expression. */
5480 temp = expand_normal (arg);
5482 /* Check if the back end provides an insn that handles signbit for the
5483 argument's mode. */
5484 icode = optab_handler (signbit_optab, fmode);
5485 if (icode != CODE_FOR_nothing)
5487 rtx_insn *last = get_last_insn ();
5488 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5489 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5490 return target;
5491 delete_insns_since (last);
5494 /* For floating point formats without a sign bit, implement signbit
5495 as "ARG < 0.0". */
5496 bitpos = fmt->signbit_ro;
5497 if (bitpos < 0)
5499 /* But we can't do this if the format supports signed zero. */
5500 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5502 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5503 build_real (TREE_TYPE (arg), dconst0));
5504 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5507 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5509 imode = int_mode_for_mode (fmode).require ();
5510 temp = gen_lowpart (imode, temp);
5512 else
5514 imode = word_mode;
5515 /* Handle targets with different FP word orders. */
5516 if (FLOAT_WORDS_BIG_ENDIAN)
5517 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5518 else
5519 word = bitpos / BITS_PER_WORD;
5520 temp = operand_subword_force (temp, word, fmode);
5521 bitpos = bitpos % BITS_PER_WORD;
5524 /* Force the intermediate word_mode (or narrower) result into a
5525 register. This avoids attempting to create paradoxical SUBREGs
5526 of floating point modes below. */
5527 temp = force_reg (imode, temp);
5529 /* If the bitpos is within the "result mode" lowpart, the operation
5530 can be implemented with a single bitwise AND. Otherwise, we need
5531 a right shift and an AND. */
5533 if (bitpos < GET_MODE_BITSIZE (rmode))
5535 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5537 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5538 temp = gen_lowpart (rmode, temp);
5539 temp = expand_binop (rmode, and_optab, temp,
5540 immed_wide_int_const (mask, rmode),
5541 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5543 else
5545 /* Perform a logical right shift to place the signbit in the least
5546 significant bit, then truncate the result to the desired mode
5547 and mask just this bit. */
5548 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5549 temp = gen_lowpart (rmode, temp);
5550 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5551 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5554 return temp;
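/* Worked example (illustrative): for IEEE double with a 32-bit int result
   on a 64-bit target, fmt->signbit_ro is 63 while rmode has 32 bits, so
   the shift-and-mask path computes

     result = (int) ((bits >> 63) & 1);

   whereas for IEEE single, bit 31 lies within the rmode lowpart and a
   single AND with the mask 0x80000000 suffices.  */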
5557 /* Expand fork or exec calls. TARGET is the desired target of the
5558 call. EXP is the call. FN is the identifier of the actual
5559 function. IGNORE is nonzero if the
5560 value is to be ignored. */
5562 static rtx
5563 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5565 tree id, decl;
5566 tree call;
5568 /* If we are not profiling, just call the function. */
5569 if (!profile_arc_flag)
5570 return NULL_RTX;
5572 /* Otherwise call the wrapper. This should be equivalent for the rest of
5573 compiler, so the code does not diverge, and the wrapper may run the
5574 code necessary for keeping the profiling sane. */
5576 switch (DECL_FUNCTION_CODE (fn))
5578 case BUILT_IN_FORK:
5579 id = get_identifier ("__gcov_fork");
5580 break;
5582 case BUILT_IN_EXECL:
5583 id = get_identifier ("__gcov_execl");
5584 break;
5586 case BUILT_IN_EXECV:
5587 id = get_identifier ("__gcov_execv");
5588 break;
5590 case BUILT_IN_EXECLP:
5591 id = get_identifier ("__gcov_execlp");
5592 break;
5594 case BUILT_IN_EXECLE:
5595 id = get_identifier ("__gcov_execle");
5596 break;
5598 case BUILT_IN_EXECVP:
5599 id = get_identifier ("__gcov_execvp");
5600 break;
5602 case BUILT_IN_EXECVE:
5603 id = get_identifier ("__gcov_execve");
5604 break;
5606 default:
5607 gcc_unreachable ();
5610 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5611 FUNCTION_DECL, id, TREE_TYPE (fn));
5612 DECL_EXTERNAL (decl) = 1;
5613 TREE_PUBLIC (decl) = 1;
5614 DECL_ARTIFICIAL (decl) = 1;
5615 TREE_NOTHROW (decl) = 1;
5616 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5617 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5618 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5619 return expand_call (call, target, ignore);
5624 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5625 the pointer in these functions is void*, the tree optimizers may remove
5626 casts. The mode computed in expand_builtin isn't reliable either, due
5627 to __sync_bool_compare_and_swap.
5629 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5630 group of builtins. This gives us log2 of the mode size. */
5632 static inline machine_mode
5633 get_builtin_sync_mode (int fcode_diff)
5635 /* The size is not negotiable, so ask not to get BLKmode in return
5636 if the target indicates that a smaller size would be better. */
5637 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
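/* Illustrative mapping: FCODE_DIFF 0 yields an 8-bit mode (QImode on most
   targets), 1 a 16-bit mode, 2 a 32-bit mode, 3 a 64-bit mode and 4 a
   128-bit mode, matching the _1/_2/_4/_8/_16 suffixes of the __sync and
   __atomic builtins.  */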
5640 /* Expand the memory expression LOC and return the appropriate memory operand
5641 for the builtin_sync operations. */
5643 static rtx
5644 get_builtin_sync_mem (tree loc, machine_mode mode)
5646 rtx addr, mem;
5648 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5649 addr = convert_memory_address (Pmode, addr);
5651 /* Note that we explicitly do not want any alias information for this
5652 memory, so that we kill all other live memories. Otherwise we don't
5653 satisfy the full barrier semantics of the intrinsic. */
5654 mem = validize_mem (gen_rtx_MEM (mode, addr));
5656 /* The alignment must be at least that required by the mode. */
5657 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5658 get_pointer_alignment (loc)));
5659 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5660 MEM_VOLATILE_P (mem) = 1;
5662 return mem;
5665 /* Make sure an argument is in the right mode.
5666 EXP is the tree argument.
5667 MODE is the mode it should be in. */
5669 static rtx
5670 expand_expr_force_mode (tree exp, machine_mode mode)
5672 rtx val;
5673 machine_mode old_mode;
5675 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5676 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5677 of CONST_INTs, where we know the old_mode only from the call argument. */
5679 old_mode = GET_MODE (val);
5680 if (old_mode == VOIDmode)
5681 old_mode = TYPE_MODE (TREE_TYPE (exp));
5682 val = convert_modes (mode, old_mode, val, 1);
5683 return val;
5687 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5688 EXP is the CALL_EXPR. CODE is the rtx code
5689 that corresponds to the arithmetic or logical operation from the name;
5690 an exception here is that NOT actually means NAND. TARGET is an optional
5691 place for us to store the results; AFTER is true if this is the
5692 fetch_and_xxx form. */
5694 static rtx
5695 expand_builtin_sync_operation (machine_mode mode, tree exp,
5696 enum rtx_code code, bool after,
5697 rtx target)
5699 rtx val, mem;
5700 location_t loc = EXPR_LOCATION (exp);
5702 if (code == NOT && warn_sync_nand)
5704 tree fndecl = get_callee_fndecl (exp);
5705 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5707 static bool warned_f_a_n, warned_n_a_f;
5709 switch (fcode)
5711 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5712 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5713 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5714 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5715 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5716 if (warned_f_a_n)
5717 break;
5719 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5720 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5721 warned_f_a_n = true;
5722 break;
5724 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5725 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5726 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5727 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5728 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5729 if (warned_n_a_f)
5730 break;
5732 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5733 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5734 warned_n_a_f = true;
5735 break;
5737 default:
5738 gcc_unreachable ();
5742 /* Expand the operands. */
5743 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5744 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5746 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5747 after);
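/* Background for the -Wsync-nand note above (illustrative): before GCC 4.4,

     __sync_fetch_and_nand (ptr, val);

   performed *ptr = ~*ptr & val, whereas from GCC 4.4 onwards it performs
   the documented *ptr = ~(*ptr & val), which is what the NOT code reaching
   expand_atomic_fetch_op implements.  */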
5750 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5751 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5752 true if this is the boolean form. TARGET is a place for us to store the
5753 results; this is NOT optional if IS_BOOL is true. */
5755 static rtx
5756 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5757 bool is_bool, rtx target)
5759 rtx old_val, new_val, mem;
5760 rtx *pbool, *poval;
5762 /* Expand the operands. */
5763 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5764 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5765 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5767 pbool = poval = NULL;
5768 if (target != const0_rtx)
5770 if (is_bool)
5771 pbool = &target;
5772 else
5773 poval = &target;
5775 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5776 false, MEMMODEL_SYNC_SEQ_CST,
5777 MEMMODEL_SYNC_SEQ_CST))
5778 return NULL_RTX;
5780 return target;
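/* Illustrative user-level forms served by the expander above (assuming an
   int-sized MODE):

     int  old = __sync_val_compare_and_swap (p, expected, desired);
     bool ok  = __sync_bool_compare_and_swap (p, expected, desired);

   IS_BOOL merely selects whether the success flag or the prior value is
   wired up as the result.  */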
5783 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5784 general form is actually an atomic exchange, and some targets only
5785 support a reduced form with the second argument being a constant 1.
5786 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5787 the results. */
5789 static rtx
5790 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5791 rtx target)
5793 rtx val, mem;
5795 /* Expand the operands. */
5796 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5797 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5799 return expand_sync_lock_test_and_set (target, mem, val);
5802 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5804 static void
5805 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5807 rtx mem;
5809 /* Expand the operands. */
5810 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5812 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5815 /* Given an integer representing an ``enum memmodel'', verify its
5816 correctness and return the memory model enum. */
5818 static enum memmodel
5819 get_memmodel (tree exp)
5821 rtx op;
5822 unsigned HOST_WIDE_INT val;
5823 source_location loc
5824 = expansion_point_location_if_in_system_header (input_location);
5826 /* If the parameter is not a constant, it's a run time value so we'll just
5827 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5828 if (TREE_CODE (exp) != INTEGER_CST)
5829 return MEMMODEL_SEQ_CST;
5831 op = expand_normal (exp);
5833 val = INTVAL (op);
5834 if (targetm.memmodel_check)
5835 val = targetm.memmodel_check (val);
5836 else if (val & ~MEMMODEL_MASK)
5838 warning_at (loc, OPT_Winvalid_memory_model,
5839 "unknown architecture specifier in memory model to builtin");
5840 return MEMMODEL_SEQ_CST;
5843 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5844 if (memmodel_base (val) >= MEMMODEL_LAST)
5846 warning_at (loc, OPT_Winvalid_memory_model,
5847 "invalid memory model argument to builtin");
5848 return MEMMODEL_SEQ_CST;
5851 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5852 be conservative and promote consume to acquire. */
5853 if (val == MEMMODEL_CONSUME)
5854 val = MEMMODEL_ACQUIRE;
5856 return (enum memmodel) val;
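/* Illustrative consequence of the consume promotion above:

     v = __atomic_load_n (p, __ATOMIC_CONSUME);

   is expanded exactly as if __ATOMIC_ACQUIRE had been written, a
   deliberately conservative choice while consume ordering remains
   untracked (Bugzilla 59448).  */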
5859 /* Expand the __atomic_exchange intrinsic:
5860 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5861 EXP is the CALL_EXPR.
5862 TARGET is an optional place for us to store the results. */
5864 static rtx
5865 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5867 rtx val, mem;
5868 enum memmodel model;
5870 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5872 if (!flag_inline_atomics)
5873 return NULL_RTX;
5875 /* Expand the operands. */
5876 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5877 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5879 return expand_atomic_exchange (target, mem, val, model);
5882 /* Expand the __atomic_compare_exchange intrinsic:
5883 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5884 TYPE desired, BOOL weak,
5885 enum memmodel success,
5886 enum memmodel failure)
5887 EXP is the CALL_EXPR.
5888 TARGET is an optional place for us to store the results. */
5890 static rtx
5891 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5892 rtx target)
5894 rtx expect, desired, mem, oldval;
5895 rtx_code_label *label;
5896 enum memmodel success, failure;
5897 tree weak;
5898 bool is_weak;
5899 source_location loc
5900 = expansion_point_location_if_in_system_header (input_location);
5902 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5903 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5905 if (failure > success)
5907 warning_at (loc, OPT_Winvalid_memory_model,
5908 "failure memory model cannot be stronger than success "
5909 "memory model for %<__atomic_compare_exchange%>");
5910 success = MEMMODEL_SEQ_CST;
5913 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5915 warning_at (loc, OPT_Winvalid_memory_model,
5916 "invalid failure memory model for "
5917 "%<__atomic_compare_exchange%>");
5918 failure = MEMMODEL_SEQ_CST;
5919 success = MEMMODEL_SEQ_CST;
5923 if (!flag_inline_atomics)
5924 return NULL_RTX;
5926 /* Expand the operands. */
5927 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5929 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5930 expect = convert_memory_address (Pmode, expect);
5931 expect = gen_rtx_MEM (mode, expect);
5932 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5934 weak = CALL_EXPR_ARG (exp, 3);
5935 is_weak = false;
5936 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5937 is_weak = true;
5939 if (target == const0_rtx)
5940 target = NULL;
5942 /* Lest the rtl backend create a race condition with an improper store
5943 to memory, always create a new pseudo for OLDVAL. */
5944 oldval = NULL;
5946 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5947 is_weak, success, failure))
5948 return NULL_RTX;
5950 /* Conditionally store back to EXPECT, lest we create a race condition
5951 with an improper store to memory. */
5952 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5953 the normal case where EXPECT is totally private, i.e. a register. At
5954 which point the store can be unconditional. */
5955 label = gen_label_rtx ();
5956 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5957 GET_MODE (target), 1, label);
5958 emit_move_insn (expect, oldval);
5959 emit_label (label);
5961 return target;
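/* Sketch of the control flow emitted above (illustrative pseudo code):

     target = CAS (mem, *expect, desired)    ; success flag
     if (target != 0) goto done
     *expect = oldval                        ; write back on failure only
   done:

   matching the C11 rule that EXPECT is updated only when the exchange
   fails.  */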
5964 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5965 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5966 call. The weak parameter must be dropped to match the expected parameter
5967 list and the expected argument changed from value to pointer to memory
5968 slot. */
5970 static void
5971 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5973 unsigned int z;
5974 vec<tree, va_gc> *vec;
5976 vec_alloc (vec, 5);
5977 vec->quick_push (gimple_call_arg (call, 0));
5978 tree expected = gimple_call_arg (call, 1);
5979 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5980 TREE_TYPE (expected));
5981 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5982 if (expd != x)
5983 emit_move_insn (x, expd);
5984 tree v = make_tree (TREE_TYPE (expected), x);
5985 vec->quick_push (build1 (ADDR_EXPR,
5986 build_pointer_type (TREE_TYPE (expected)), v));
5987 vec->quick_push (gimple_call_arg (call, 2));
5988 /* Skip the boolean weak parameter. */
5989 for (z = 4; z < 6; z++)
5990 vec->quick_push (gimple_call_arg (call, z));
5991 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5992 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5993 gcc_assert (bytes_log2 < 5);
5994 built_in_function fncode
5995 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5996 + bytes_log2);
5997 tree fndecl = builtin_decl_explicit (fncode);
5998 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5999 fndecl);
6000 tree exp = build_call_vec (boolean_type_node, fn, vec);
6001 tree lhs = gimple_call_lhs (call);
6002 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6003 if (lhs)
6005 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6006 if (GET_MODE (boolret) != mode)
6007 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6008 x = force_reg (mode, x);
6009 write_complex_part (target, boolret, true);
6010 write_complex_part (target, x, false);
6014 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6016 void
6017 expand_ifn_atomic_compare_exchange (gcall *call)
6019 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6020 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6021 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6022 rtx expect, desired, mem, oldval, boolret;
6023 enum memmodel success, failure;
6024 tree lhs;
6025 bool is_weak;
6026 source_location loc
6027 = expansion_point_location_if_in_system_header (gimple_location (call));
6029 success = get_memmodel (gimple_call_arg (call, 4));
6030 failure = get_memmodel (gimple_call_arg (call, 5));
6032 if (failure > success)
6034 warning_at (loc, OPT_Winvalid_memory_model,
6035 "failure memory model cannot be stronger than success "
6036 "memory model for %<__atomic_compare_exchange%>");
6037 success = MEMMODEL_SEQ_CST;
6040 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6042 warning_at (loc, OPT_Winvalid_memory_model,
6043 "invalid failure memory model for "
6044 "%<__atomic_compare_exchange%>");
6045 failure = MEMMODEL_SEQ_CST;
6046 success = MEMMODEL_SEQ_CST;
6049 if (!flag_inline_atomics)
6051 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6052 return;
6055 /* Expand the operands. */
6056 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6058 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6059 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6061 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6063 boolret = NULL;
6064 oldval = NULL;
6066 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6067 is_weak, success, failure))
6069 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6070 return;
6073 lhs = gimple_call_lhs (call);
6074 if (lhs)
6076 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6077 if (GET_MODE (boolret) != mode)
6078 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6079 write_complex_part (target, boolret, true);
6080 write_complex_part (target, oldval, false);
6084 /* Expand the __atomic_load intrinsic:
6085 TYPE __atomic_load (TYPE *object, enum memmodel)
6086 EXP is the CALL_EXPR.
6087 TARGET is an optional place for us to store the results. */
6089 static rtx
6090 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6092 rtx mem;
6093 enum memmodel model;
6095 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6096 if (is_mm_release (model) || is_mm_acq_rel (model))
6098 source_location loc
6099 = expansion_point_location_if_in_system_header (input_location);
6100 warning_at (loc, OPT_Winvalid_memory_model,
6101 "invalid memory model for %<__atomic_load%>");
6102 model = MEMMODEL_SEQ_CST;
6105 if (!flag_inline_atomics)
6106 return NULL_RTX;
6108 /* Expand the operand. */
6109 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6111 return expand_atomic_load (target, mem, model);
6115 /* Expand the __atomic_store intrinsic:
6116 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6117 EXP is the CALL_EXPR.
6118 TARGET is an optional place for us to store the results. */
6120 static rtx
6121 expand_builtin_atomic_store (machine_mode mode, tree exp)
6123 rtx mem, val;
6124 enum memmodel model;
6126 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6127 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6128 || is_mm_release (model)))
6130 source_location loc
6131 = expansion_point_location_if_in_system_header (input_location);
6132 warning_at (loc, OPT_Winvalid_memory_model,
6133 "invalid memory model for %<__atomic_store%>");
6134 model = MEMMODEL_SEQ_CST;
6137 if (!flag_inline_atomics)
6138 return NULL_RTX;
6140 /* Expand the operands. */
6141 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6142 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6144 return expand_atomic_store (mem, val, model, false);
6147 /* Expand the __atomic_fetch_XXX intrinsic:
6148 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6149 EXP is the CALL_EXPR.
6150 TARGET is an optional place for us to store the results.
6151 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6152 FETCH_AFTER is true if returning the result of the operation,
6153 and false if returning the value before the operation.
6154 IGNORE is true if the result is not used.
6155 EXT_CALL is the correct builtin for an external call if this cannot be
6156 resolved to an instruction sequence. */
6158 static rtx
6159 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6160 enum rtx_code code, bool fetch_after,
6161 bool ignore, enum built_in_function ext_call)
6163 rtx val, mem, ret;
6164 enum memmodel model;
6165 tree fndecl;
6166 tree addr;
6168 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6170 /* Expand the operands. */
6171 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6172 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6174 /* Only try generating instructions if inlining is turned on. */
6175 if (flag_inline_atomics)
6177 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6178 if (ret)
6179 return ret;
6182 /* Return if a different routine isn't needed for the library call. */
6183 if (ext_call == BUILT_IN_NONE)
6184 return NULL_RTX;
6186 /* Change the call to the specified function. */
6187 fndecl = get_callee_fndecl (exp);
6188 addr = CALL_EXPR_FN (exp);
6189 STRIP_NOPS (addr);
6191 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6192 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6194 /* If we will emit code after the call, the call cannot be a tail call.
6195 If it is emitted as a tail call, a barrier is emitted after it, and
6196 then all trailing code is removed. */
6197 if (!ignore)
6198 CALL_EXPR_TAILCALL (exp) = 0;
6200 /* Expand the call here so we can emit trailing code. */
6201 ret = expand_call (exp, target, ignore);
6203 /* Replace the original function just in case it matters. */
6204 TREE_OPERAND (addr, 0) = fndecl;
6206 /* Then issue the arithmetic correction to return the right result. */
6207 if (!ignore)
6209 if (code == NOT)
6211 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6212 OPTAB_LIB_WIDEN);
6213 ret = expand_simple_unop (mode, NOT, ret, target, true);
6215 else
6216 ret = expand_simple_binop (mode, code, ret, val, target, true,
6217 OPTAB_LIB_WIDEN);
6219 return ret;
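/* Worked example of the correction above (illustrative): if only the
   library __atomic_fetch_and_N is available for an __atomic_and_fetch
   call, the library routine returns the OLD value, so the new value is
   recomputed as ret = old & val; for NAND (CODE == NOT) the two steps
   above compute ret = ~(old & val), matching the builtin's documented
   semantics.  */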
6222 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6224 void
6225 expand_ifn_atomic_bit_test_and (gcall *call)
6227 tree ptr = gimple_call_arg (call, 0);
6228 tree bit = gimple_call_arg (call, 1);
6229 tree flag = gimple_call_arg (call, 2);
6230 tree lhs = gimple_call_lhs (call);
6231 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6232 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6233 enum rtx_code code;
6234 optab optab;
6235 struct expand_operand ops[5];
6237 gcc_assert (flag_inline_atomics);
6239 if (gimple_call_num_args (call) == 4)
6240 model = get_memmodel (gimple_call_arg (call, 3));
6242 rtx mem = get_builtin_sync_mem (ptr, mode);
6243 rtx val = expand_expr_force_mode (bit, mode);
6245 switch (gimple_call_internal_fn (call))
6247 case IFN_ATOMIC_BIT_TEST_AND_SET:
6248 code = IOR;
6249 optab = atomic_bit_test_and_set_optab;
6250 break;
6251 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6252 code = XOR;
6253 optab = atomic_bit_test_and_complement_optab;
6254 break;
6255 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6256 code = AND;
6257 optab = atomic_bit_test_and_reset_optab;
6258 break;
6259 default:
6260 gcc_unreachable ();
6263 if (lhs == NULL_TREE)
6265 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6266 val, NULL_RTX, true, OPTAB_DIRECT);
6267 if (code == AND)
6268 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6269 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6270 return;
6273 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6274 enum insn_code icode = direct_optab_handler (optab, mode);
6275 gcc_assert (icode != CODE_FOR_nothing);
6276 create_output_operand (&ops[0], target, mode);
6277 create_fixed_operand (&ops[1], mem);
6278 create_convert_operand_to (&ops[2], val, mode, true);
6279 create_integer_operand (&ops[3], model);
6280 create_integer_operand (&ops[4], integer_onep (flag));
6281 if (maybe_expand_insn (icode, 5, ops))
6282 return;
6284 rtx bitval = val;
6285 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6286 val, NULL_RTX, true, OPTAB_DIRECT);
6287 rtx maskval = val;
6288 if (code == AND)
6289 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6290 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6291 code, model, false);
6292 if (integer_onep (flag))
6294 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6295 NULL_RTX, true, OPTAB_DIRECT);
6296 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6297 true, OPTAB_DIRECT);
6299 else
6300 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6301 OPTAB_DIRECT);
6302 if (result != target)
6303 emit_move_insn (target, result);
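/* Fallback arithmetic used above when no direct pattern exists
   (illustrative, for IFN_ATOMIC_BIT_TEST_AND_SET with FLAG nonzero):

     mask   = 1 << bit;
     old    = atomic_fetch_or (ptr, mask);
     result = (old >> bit) & 1;

   The RESET variant fetches with AND of ~mask, and when FLAG is zero the
   result is left as old & mask instead of being normalized to 0 or 1.  */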
6306 /* Expand an atomic clear operation.
6307 void _atomic_clear (BOOL *obj, enum memmodel)
6308 EXP is the call expression. */
6310 static rtx
6311 expand_builtin_atomic_clear (tree exp)
6313 machine_mode mode;
6314 rtx mem, ret;
6315 enum memmodel model;
6317 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6318 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6319 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6321 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6323 source_location loc
6324 = expansion_point_location_if_in_system_header (input_location);
6325 warning_at (loc, OPT_Winvalid_memory_model,
6326 "invalid memory model for %<__atomic_store%>");
6327 model = MEMMODEL_SEQ_CST;
6330 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release;
6331 failing both, a plain store is emitted below. The only way this can
6332 fail is if the bool type is larger than a word size. Unlikely, but
6333 handle it anyway for completeness. Assume a single-threaded model since
6334 there is no atomic support in this case, and no barriers are required. */
6335 ret = expand_atomic_store (mem, const0_rtx, model, true);
6336 if (!ret)
6337 emit_move_insn (mem, const0_rtx);
6338 return const0_rtx;
6341 /* Expand an atomic test_and_set operation.
6342 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6343 EXP is the call expression. */
6345 static rtx
6346 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6348 rtx mem;
6349 enum memmodel model;
6350 machine_mode mode;
6352 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6353 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6354 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6356 return expand_atomic_test_and_set (target, mem, model);
6360 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6361 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6363 static tree
6364 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6366 int size;
6367 machine_mode mode;
6368 unsigned int mode_align, type_align;
6370 if (TREE_CODE (arg0) != INTEGER_CST)
6371 return NULL_TREE;
6373 /* We need a corresponding integer mode for the access to be lock-free. */
6374 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6375 if (!int_mode_for_size (size, 0).exists (&mode))
6376 return boolean_false_node;
6378 mode_align = GET_MODE_ALIGNMENT (mode);
6380 if (TREE_CODE (arg1) == INTEGER_CST)
6382 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6384 /* Either this argument is null, or it's a fake pointer encoding
6385 the alignment of the object. */
6386 val = least_bit_hwi (val);
6387 val *= BITS_PER_UNIT;
6389 if (val == 0 || mode_align < val)
6390 type_align = mode_align;
6391 else
6392 type_align = val;
6394 else
6396 tree ttype = TREE_TYPE (arg1);
6398 /* This function is usually invoked and folded immediately by the front
6399 end before anything else has a chance to look at it. The pointer
6400 parameter at this point is usually cast to a void *, so check for that
6401 and look past the cast. */
6402 if (CONVERT_EXPR_P (arg1)
6403 && POINTER_TYPE_P (ttype)
6404 && VOID_TYPE_P (TREE_TYPE (ttype))
6405 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6406 arg1 = TREE_OPERAND (arg1, 0);
6408 ttype = TREE_TYPE (arg1);
6409 gcc_assert (POINTER_TYPE_P (ttype));
6411 /* Get the underlying type of the object. */
6412 ttype = TREE_TYPE (ttype);
6413 type_align = TYPE_ALIGN (ttype);
6416 /* If the object has smaller alignment, the lock free routines cannot
6417 be used. */
6418 if (type_align < mode_align)
6419 return boolean_false_node;
6421 /* Check if a compare_and_swap pattern exists for the mode which represents
6422 the required size. The pattern is not allowed to fail, so the existence
6423 of the pattern indicates support is present. Also require that an
6424 atomic load exists for the required size. */
6425 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6426 return boolean_true_node;
6427 else
6428 return boolean_false_node;
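/* Illustrative folding: on a typical 64-bit target,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true, since SImode has a never-failing compare-and-swap
   pattern and an atomic load, and the null object pointer defers to the
   mode's own alignment; a constant pointer encoding a smaller alignment
   folds the call to false instead.  */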
6431 /* Return true if the parameters to call EXP represent an object which will
6432 always generate lock free instructions. The first argument represents the
6433 size of the object, and the second parameter is a pointer to the object
6434 itself. If NULL is passed for the object, then the result is based on
6435 typical alignment for an object of the specified size. Otherwise return
6436 false. */
6438 static rtx
6439 expand_builtin_atomic_always_lock_free (tree exp)
6441 tree size;
6442 tree arg0 = CALL_EXPR_ARG (exp, 0);
6443 tree arg1 = CALL_EXPR_ARG (exp, 1);
6445 if (TREE_CODE (arg0) != INTEGER_CST)
6447 error ("non-constant argument 1 to __atomic_always_lock_free");
6448 return const0_rtx;
6451 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6452 if (size == boolean_true_node)
6453 return const1_rtx;
6454 return const0_rtx;
6457 /* Return one or zero if it can be determined that object ARG1 of size
6458 ARG0 is lock free on this architecture. */
6460 static tree
6461 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6463 if (!flag_inline_atomics)
6464 return NULL_TREE;
6466 /* If it isn't always lock free, don't generate a result. */
6467 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6468 return boolean_true_node;
6470 return NULL_TREE;
6473 /* Return true if the parameters to call EXP represent an object which will
6474 always generate lock free instructions. The first argument represents the
6475 size of the object, and the second parameter is a pointer to the object
6476 itself. If NULL is passed for the object, then the result is based on
6477 typical alignment for an object of the specified size. Otherwise return
6478 NULL. */
6480 static rtx
6481 expand_builtin_atomic_is_lock_free (tree exp)
6483 tree size;
6484 tree arg0 = CALL_EXPR_ARG (exp, 0);
6485 tree arg1 = CALL_EXPR_ARG (exp, 1);
6487 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6489 error ("non-integer argument 1 to __atomic_is_lock_free");
6490 return NULL_RTX;
6493 if (!flag_inline_atomics)
6494 return NULL_RTX;
6496 /* If the value is known at compile time, return the RTX for it. */
6497 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6498 if (size == boolean_true_node)
6499 return const1_rtx;
6501 return NULL_RTX;
6504 /* Expand the __atomic_thread_fence intrinsic:
6505 void __atomic_thread_fence (enum memmodel)
6506 EXP is the CALL_EXPR. */
6508 static void
6509 expand_builtin_atomic_thread_fence (tree exp)
6511 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6512 expand_mem_thread_fence (model);
6515 /* Expand the __atomic_signal_fence intrinsic:
6516 void __atomic_signal_fence (enum memmodel)
6517 EXP is the CALL_EXPR. */
6519 static void
6520 expand_builtin_atomic_signal_fence (tree exp)
6522 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6523 expand_mem_signal_fence (model);
6526 /* Expand the __sync_synchronize intrinsic. */
6528 static void
6529 expand_builtin_sync_synchronize (void)
6531 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6534 static rtx
6535 expand_builtin_thread_pointer (tree exp, rtx target)
6537 enum insn_code icode;
6538 if (!validate_arglist (exp, VOID_TYPE))
6539 return const0_rtx;
6540 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6541 if (icode != CODE_FOR_nothing)
6543 struct expand_operand op;
6544 /* If the target is not suitable then create a new target. */
6545 if (target == NULL_RTX
6546 || !REG_P (target)
6547 || GET_MODE (target) != Pmode)
6548 target = gen_reg_rtx (Pmode);
6549 create_output_operand (&op, target, Pmode);
6550 expand_insn (icode, 1, &op);
6551 return target;
6553 error ("__builtin_thread_pointer is not supported on this target");
6554 return const0_rtx;
6557 static void
6558 expand_builtin_set_thread_pointer (tree exp)
6560 enum insn_code icode;
6561 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6562 return;
6563 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6564 if (icode != CODE_FOR_nothing)
6566 struct expand_operand op;
6567 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6568 Pmode, EXPAND_NORMAL);
6569 create_input_operand (&op, val, Pmode);
6570 expand_insn (icode, 1, &op);
6571 return;
6573 error ("__builtin_set_thread_pointer is not supported on this target");
6577 /* Emit code to restore the current value of stack. */
6579 static void
6580 expand_stack_restore (tree var)
6582 rtx_insn *prev;
6583 rtx sa = expand_normal (var);
6585 sa = convert_memory_address (Pmode, sa);
6587 prev = get_last_insn ();
6588 emit_stack_restore (SAVE_BLOCK, sa);
6590 record_new_stack_level ();
6592 fixup_args_size_notes (prev, get_last_insn (), 0);
6595 /* Emit code to save the current value of stack. */
6597 static rtx
6598 expand_stack_save (void)
6600 rtx ret = NULL_RTX;
6602 emit_stack_save (SAVE_BLOCK, &ret);
6603 return ret;
6607 /* Expand an expression EXP that calls a built-in function,
6608 with result going to TARGET if that's convenient
6609 (and in mode MODE if that's convenient).
6610 SUBTARGET may be used as the target for computing one of EXP's operands.
6611 IGNORE is nonzero if the value is to be ignored. */
6614 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6615 int ignore)
6617 tree fndecl = get_callee_fndecl (exp);
6618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6619 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6620 int flags;
6622 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6623 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6625 /* When ASan is enabled, we don't want to expand some memory/string
6626 builtins and rely on libsanitizer's hooks. This allows us to avoid
6627 redundant checks and be sure that possible overflows will be detected
6628 by ASan. */
6630 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6631 return expand_call (exp, target, ignore);
6633 /* When not optimizing, generate calls to library functions for a certain
6634 set of builtins. */
6635 if (!optimize
6636 && !called_as_built_in (fndecl)
6637 && fcode != BUILT_IN_FORK
6638 && fcode != BUILT_IN_EXECL
6639 && fcode != BUILT_IN_EXECV
6640 && fcode != BUILT_IN_EXECLP
6641 && fcode != BUILT_IN_EXECLE
6642 && fcode != BUILT_IN_EXECVP
6643 && fcode != BUILT_IN_EXECVE
6644 && !ALLOCA_FUNCTION_CODE_P (fcode)
6645 && fcode != BUILT_IN_FREE
6646 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6647 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6648 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6649 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6650 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6651 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6652 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6653 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6654 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6655 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6656 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6657 && fcode != BUILT_IN_CHKP_BNDRET)
6658 return expand_call (exp, target, ignore);
6660 /* The built-in function expanders test for target == const0_rtx
6661 to determine whether the function's result will be ignored. */
6662 if (ignore)
6663 target = const0_rtx;
6665 /* If the result of a pure or const built-in function is ignored, and
6666 none of its arguments are volatile, we can avoid expanding the
6667 built-in call and just evaluate the arguments for side-effects. */
6668 if (target == const0_rtx
6669 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6670 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6672 bool volatilep = false;
6673 tree arg;
6674 call_expr_arg_iterator iter;
6676 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6677 if (TREE_THIS_VOLATILE (arg))
6679 volatilep = true;
6680 break;
6683 if (! volatilep)
6685 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6686 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6687 return const0_rtx;
6691 /* expand_builtin_with_bounds is supposed to be used for
6692 instrumented builtin calls. */
6693 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6695 switch (fcode)
6697 CASE_FLT_FN (BUILT_IN_FABS):
6698 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6699 case BUILT_IN_FABSD32:
6700 case BUILT_IN_FABSD64:
6701 case BUILT_IN_FABSD128:
6702 target = expand_builtin_fabs (exp, target, subtarget);
6703 if (target)
6704 return target;
6705 break;
6707 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6708 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6709 target = expand_builtin_copysign (exp, target, subtarget);
6710 if (target)
6711 return target;
6712 break;
6714 /* Just do a normal library call if we were unable to fold
6715 the values. */
6716 CASE_FLT_FN (BUILT_IN_CABS):
6717 break;
6719 CASE_FLT_FN (BUILT_IN_FMA):
6720 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6721 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6722 if (target)
6723 return target;
6724 break;
6726 CASE_FLT_FN (BUILT_IN_ILOGB):
6727 if (! flag_unsafe_math_optimizations)
6728 break;
6729 gcc_fallthrough ();
6730 CASE_FLT_FN (BUILT_IN_ISINF):
6731 CASE_FLT_FN (BUILT_IN_FINITE):
6732 case BUILT_IN_ISFINITE:
6733 case BUILT_IN_ISNORMAL:
6734 target = expand_builtin_interclass_mathfn (exp, target);
6735 if (target)
6736 return target;
6737 break;
6739 CASE_FLT_FN (BUILT_IN_ICEIL):
6740 CASE_FLT_FN (BUILT_IN_LCEIL):
6741 CASE_FLT_FN (BUILT_IN_LLCEIL):
6742 CASE_FLT_FN (BUILT_IN_LFLOOR):
6743 CASE_FLT_FN (BUILT_IN_IFLOOR):
6744 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6745 target = expand_builtin_int_roundingfn (exp, target);
6746 if (target)
6747 return target;
6748 break;
6750 CASE_FLT_FN (BUILT_IN_IRINT):
6751 CASE_FLT_FN (BUILT_IN_LRINT):
6752 CASE_FLT_FN (BUILT_IN_LLRINT):
6753 CASE_FLT_FN (BUILT_IN_IROUND):
6754 CASE_FLT_FN (BUILT_IN_LROUND):
6755 CASE_FLT_FN (BUILT_IN_LLROUND):
6756 target = expand_builtin_int_roundingfn_2 (exp, target);
6757 if (target)
6758 return target;
6759 break;
6761 CASE_FLT_FN (BUILT_IN_POWI):
6762 target = expand_builtin_powi (exp, target);
6763 if (target)
6764 return target;
6765 break;
6767 CASE_FLT_FN (BUILT_IN_CEXPI):
6768 target = expand_builtin_cexpi (exp, target);
6769 gcc_assert (target);
6770 return target;
6772 CASE_FLT_FN (BUILT_IN_SIN):
6773 CASE_FLT_FN (BUILT_IN_COS):
6774 if (! flag_unsafe_math_optimizations)
6775 break;
6776 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6777 if (target)
6778 return target;
6779 break;
6781 CASE_FLT_FN (BUILT_IN_SINCOS):
6782 if (! flag_unsafe_math_optimizations)
6783 break;
6784 target = expand_builtin_sincos (exp);
6785 if (target)
6786 return target;
6787 break;
6789 case BUILT_IN_APPLY_ARGS:
6790 return expand_builtin_apply_args ();
6792 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6793 FUNCTION with a copy of the parameters described by
6794 ARGUMENTS, and ARGSIZE. It returns a block of memory
6795 allocated on the stack into which is stored all the registers
6796 that might possibly be used for returning the result of a
6797 function. ARGUMENTS is the value returned by
6798 __builtin_apply_args. ARGSIZE is the number of bytes of
6799 arguments that must be copied. ??? How should this value be
6800 computed? We'll also need a safe worst case value for varargs
6801 functions. */
6802 case BUILT_IN_APPLY:
6803 if (!validate_arglist (exp, POINTER_TYPE,
6804 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6805 && !validate_arglist (exp, REFERENCE_TYPE,
6806 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6807 return const0_rtx;
6808 else
6810 rtx ops[3];
6812 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6813 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6814 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6816 return expand_builtin_apply (ops[0], ops[1], ops[2]);
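/* Illustrative use of the apply machinery (not from the original source):
   a forwarding wrapper might look like

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   where TARGET_FN is a hypothetical callee and the 64-byte ARGSIZE is a
   caller-supplied worst case, exactly the open question raised in the
   comment before this case.  */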
6819 /* __builtin_return (RESULT) causes the function to return the
6820 value described by RESULT. RESULT is address of the block of
6821 memory returned by __builtin_apply. */
6822 case BUILT_IN_RETURN:
6823 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6824 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6825 return const0_rtx;
6827 case BUILT_IN_SAVEREGS:
6828 return expand_builtin_saveregs ();
6830 case BUILT_IN_VA_ARG_PACK:
6831 /* All valid uses of __builtin_va_arg_pack () are removed during
6832 inlining. */
6833 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6834 return const0_rtx;
6836 case BUILT_IN_VA_ARG_PACK_LEN:
6837 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6838 inlining. */
6839 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6840 return const0_rtx;
6842 /* Return the address of the first anonymous stack arg. */
6843 case BUILT_IN_NEXT_ARG:
6844 if (fold_builtin_next_arg (exp, false))
6845 return const0_rtx;
6846 return expand_builtin_next_arg ();
6848 case BUILT_IN_CLEAR_CACHE:
6849 target = expand_builtin___clear_cache (exp);
6850 if (target)
6851 return target;
6852 break;
6854 case BUILT_IN_CLASSIFY_TYPE:
6855 return expand_builtin_classify_type (exp);
6857 case BUILT_IN_CONSTANT_P:
6858 return const0_rtx;
6860 case BUILT_IN_FRAME_ADDRESS:
6861 case BUILT_IN_RETURN_ADDRESS:
6862 return expand_builtin_frame_address (fndecl, exp);
6864 /* Returns the address of the area where the structure is returned.
6865 0 otherwise. */
6866 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6867 if (call_expr_nargs (exp) != 0
6868 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6869 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6870 return const0_rtx;
6871 else
6872 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6874 CASE_BUILT_IN_ALLOCA:
6875 target = expand_builtin_alloca (exp);
6876 if (target)
6877 return target;
6878 break;
6880 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6881 return expand_asan_emit_allocas_unpoison (exp);
6883 case BUILT_IN_STACK_SAVE:
6884 return expand_stack_save ();
6886 case BUILT_IN_STACK_RESTORE:
6887 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6888 return const0_rtx;
6890 case BUILT_IN_BSWAP16:
6891 case BUILT_IN_BSWAP32:
6892 case BUILT_IN_BSWAP64:
6893 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6894 if (target)
6895 return target;
6896 break;
6898 CASE_INT_FN (BUILT_IN_FFS):
6899 target = expand_builtin_unop (target_mode, exp, target,
6900 subtarget, ffs_optab);
6901 if (target)
6902 return target;
6903 break;
6905 CASE_INT_FN (BUILT_IN_CLZ):
6906 target = expand_builtin_unop (target_mode, exp, target,
6907 subtarget, clz_optab);
6908 if (target)
6909 return target;
6910 break;
6912 CASE_INT_FN (BUILT_IN_CTZ):
6913 target = expand_builtin_unop (target_mode, exp, target,
6914 subtarget, ctz_optab);
6915 if (target)
6916 return target;
6917 break;
6919 CASE_INT_FN (BUILT_IN_CLRSB):
6920 target = expand_builtin_unop (target_mode, exp, target,
6921 subtarget, clrsb_optab);
6922 if (target)
6923 return target;
6924 break;
6926 CASE_INT_FN (BUILT_IN_POPCOUNT):
6927 target = expand_builtin_unop (target_mode, exp, target,
6928 subtarget, popcount_optab);
6929 if (target)
6930 return target;
6931 break;
6933 CASE_INT_FN (BUILT_IN_PARITY):
6934 target = expand_builtin_unop (target_mode, exp, target,
6935 subtarget, parity_optab);
6936 if (target)
6937 return target;
6938 break;
6940 case BUILT_IN_STRLEN:
6941 target = expand_builtin_strlen (exp, target, target_mode);
6942 if (target)
6943 return target;
6944 break;
6946 case BUILT_IN_STRCAT:
6947 target = expand_builtin_strcat (exp, target);
6948 if (target)
6949 return target;
6950 break;
6952 case BUILT_IN_STRCPY:
6953 target = expand_builtin_strcpy (exp, target);
6954 if (target)
6955 return target;
6956 break;
6958 case BUILT_IN_STRNCAT:
6959 target = expand_builtin_strncat (exp, target);
6960 if (target)
6961 return target;
6962 break;
6964 case BUILT_IN_STRNCPY:
6965 target = expand_builtin_strncpy (exp, target);
6966 if (target)
6967 return target;
6968 break;
6970 case BUILT_IN_STPCPY:
6971 target = expand_builtin_stpcpy (exp, target, mode);
6972 if (target)
6973 return target;
6974 break;
6976 case BUILT_IN_STPNCPY:
6977 target = expand_builtin_stpncpy (exp, target);
6978 if (target)
6979 return target;
6980 break;
6982 case BUILT_IN_MEMCHR:
6983 target = expand_builtin_memchr (exp, target);
6984 if (target)
6985 return target;
6986 break;
6988 case BUILT_IN_MEMCPY:
6989 target = expand_builtin_memcpy (exp, target);
6990 if (target)
6991 return target;
6992 break;
6994 case BUILT_IN_MEMMOVE:
6995 target = expand_builtin_memmove (exp, target);
6996 if (target)
6997 return target;
6998 break;
7000 case BUILT_IN_MEMPCPY:
7001 target = expand_builtin_mempcpy (exp, target);
7002 if (target)
7003 return target;
7004 break;
7006 case BUILT_IN_MEMSET:
7007 target = expand_builtin_memset (exp, target, mode);
7008 if (target)
7009 return target;
7010 break;
7012 case BUILT_IN_BZERO:
7013 target = expand_builtin_bzero (exp);
7014 if (target)
7015 return target;
7016 break;
7018 case BUILT_IN_STRCMP:
7019 target = expand_builtin_strcmp (exp, target);
7020 if (target)
7021 return target;
7022 break;
7024 case BUILT_IN_STRNCMP:
7025 target = expand_builtin_strncmp (exp, target, mode);
7026 if (target)
7027 return target;
7028 break;
7030 case BUILT_IN_BCMP:
7031 case BUILT_IN_MEMCMP:
7032 case BUILT_IN_MEMCMP_EQ:
7033 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7034 if (target)
7035 return target;
7036 if (fcode == BUILT_IN_MEMCMP_EQ)
7038 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7039 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7041 break;
7043 case BUILT_IN_SETJMP:
7044 /* This should have been lowered to the builtins below. */
7045 gcc_unreachable ();
7047 case BUILT_IN_SETJMP_SETUP:
7048 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7049 and the receiver label. */
7050 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7052 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7053 VOIDmode, EXPAND_NORMAL);
7054 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7055 rtx_insn *label_r = label_rtx (label);
7057 /* This is copied from the handling of non-local gotos. */
7058 expand_builtin_setjmp_setup (buf_addr, label_r);
7059 nonlocal_goto_handler_labels
7060 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7061 nonlocal_goto_handler_labels);
7062 /* ??? Do not let expand_label treat us as such since we would
7063 not want to be both on the list of non-local labels and on
7064 the list of forced labels. */
7065 FORCED_LABEL (label) = 0;
7066 return const0_rtx;
7068 break;
7070 case BUILT_IN_SETJMP_RECEIVER:
7071 /* __builtin_setjmp_receiver is passed the receiver label. */
7072 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7074 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7075 rtx_insn *label_r = label_rtx (label);
7077 expand_builtin_setjmp_receiver (label_r);
7078 return const0_rtx;
7080 break;
7082 /* __builtin_longjmp is passed a pointer to an array of five words.
7083 It's similar to the C library longjmp function but works with
7084 __builtin_setjmp above. */
7085 case BUILT_IN_LONGJMP:
7086 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7088 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7089 VOIDmode, EXPAND_NORMAL);
7090 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7092 if (value != const1_rtx)
7094 error ("%<__builtin_longjmp%> second argument must be 1");
7095 return const0_rtx;
7098 expand_builtin_longjmp (buf_addr, value);
7099 return const0_rtx;
7101 break;
7103 case BUILT_IN_NONLOCAL_GOTO:
7104 target = expand_builtin_nonlocal_goto (exp);
7105 if (target)
7106 return target;
7107 break;
7109 /* This updates the setjmp buffer that is its argument with the value
7110 of the current stack pointer. */
7111 case BUILT_IN_UPDATE_SETJMP_BUF:
7112 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7114 rtx buf_addr
7115 = expand_normal (CALL_EXPR_ARG (exp, 0));
7117 expand_builtin_update_setjmp_buf (buf_addr);
7118 return const0_rtx;
7120 break;
7122 case BUILT_IN_TRAP:
7123 expand_builtin_trap ();
7124 return const0_rtx;
7126 case BUILT_IN_UNREACHABLE:
7127 expand_builtin_unreachable ();
7128 return const0_rtx;
7130 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7131 case BUILT_IN_SIGNBITD32:
7132 case BUILT_IN_SIGNBITD64:
7133 case BUILT_IN_SIGNBITD128:
7134 target = expand_builtin_signbit (exp, target);
7135 if (target)
7136 return target;
7137 break;
7139 /* Various hooks for the DWARF 2 __throw routine. */
7140 case BUILT_IN_UNWIND_INIT:
7141 expand_builtin_unwind_init ();
7142 return const0_rtx;
7143 case BUILT_IN_DWARF_CFA:
7144 return virtual_cfa_rtx;
7145 #ifdef DWARF2_UNWIND_INFO
7146 case BUILT_IN_DWARF_SP_COLUMN:
7147 return expand_builtin_dwarf_sp_column ();
7148 case BUILT_IN_INIT_DWARF_REG_SIZES:
7149 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7150 return const0_rtx;
7151 #endif
7152 case BUILT_IN_FROB_RETURN_ADDR:
7153 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7154 case BUILT_IN_EXTRACT_RETURN_ADDR:
7155 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7156 case BUILT_IN_EH_RETURN:
7157 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7158 CALL_EXPR_ARG (exp, 1));
7159 return const0_rtx;
7160 case BUILT_IN_EH_RETURN_DATA_REGNO:
7161 return expand_builtin_eh_return_data_regno (exp);
7162 case BUILT_IN_EXTEND_POINTER:
7163 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7164 case BUILT_IN_EH_POINTER:
7165 return expand_builtin_eh_pointer (exp);
7166 case BUILT_IN_EH_FILTER:
7167 return expand_builtin_eh_filter (exp);
7168 case BUILT_IN_EH_COPY_VALUES:
7169 return expand_builtin_eh_copy_values (exp);
7171 case BUILT_IN_VA_START:
7172 return expand_builtin_va_start (exp);
7173 case BUILT_IN_VA_END:
7174 return expand_builtin_va_end (exp);
7175 case BUILT_IN_VA_COPY:
7176 return expand_builtin_va_copy (exp);
7177 case BUILT_IN_EXPECT:
7178 return expand_builtin_expect (exp, target);
7179 case BUILT_IN_ASSUME_ALIGNED:
7180 return expand_builtin_assume_aligned (exp, target);
7181 case BUILT_IN_PREFETCH:
7182 expand_builtin_prefetch (exp);
7183 return const0_rtx;
7185 case BUILT_IN_INIT_TRAMPOLINE:
7186 return expand_builtin_init_trampoline (exp, true);
7187 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7188 return expand_builtin_init_trampoline (exp, false);
7189 case BUILT_IN_ADJUST_TRAMPOLINE:
7190 return expand_builtin_adjust_trampoline (exp);
7192 case BUILT_IN_INIT_DESCRIPTOR:
7193 return expand_builtin_init_descriptor (exp);
7194 case BUILT_IN_ADJUST_DESCRIPTOR:
7195 return expand_builtin_adjust_descriptor (exp);
7197 case BUILT_IN_FORK:
7198 case BUILT_IN_EXECL:
7199 case BUILT_IN_EXECV:
7200 case BUILT_IN_EXECLP:
7201 case BUILT_IN_EXECLE:
7202 case BUILT_IN_EXECVP:
7203 case BUILT_IN_EXECVE:
7204 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7205 if (target)
7206 return target;
7207 break;
7209 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7210 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7211 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7212 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7213 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7214 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7215 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7216 if (target)
7217 return target;
7218 break;
7220 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7221 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7222 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7223 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7224 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7225 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7226 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7227 if (target)
7228 return target;
7229 break;
7231 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7232 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7233 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7234 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7235 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7236 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7237 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7238 if (target)
7239 return target;
7240 break;
7242 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7243 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7244 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7245 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7246 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7247 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7248 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7249 if (target)
7250 return target;
7251 break;
7253 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7254 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7255 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7256 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7257 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7258 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7259 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7260 if (target)
7261 return target;
7262 break;
7264 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7265 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7266 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7267 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7268 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7269 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7270 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7271 if (target)
7272 return target;
7273 break;
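/* Note: the NOT code above denotes the NAND operation.  Since GCC 4.4 a
   rough sketch of the atomic semantics is

     tmp = *ptr; *ptr = ~(tmp & value); return tmp;

   i.e. fetch-and-nand returns the value the memory held beforehand.  */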
7275 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7276 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7277 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7278 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7279 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7280 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7281 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7282 if (target)
7283 return target;
7284 break;
7286 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7287 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7288 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7289 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7290 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7291 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7292 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7293 if (target)
7294 return target;
7295 break;
7297 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7298 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7299 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7300 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7301 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7302 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7303 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7304 if (target)
7305 return target;
7306 break;
7308 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7309 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7310 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7311 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7312 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7313 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7314 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7315 if (target)
7316 return target;
7317 break;
7319 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7320 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7321 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7322 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7323 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7324 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7325 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7326 if (target)
7327 return target;
7328 break;
7330 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7331 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7332 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7333 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7334 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7335 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7336 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7337 if (target)
7338 return target;
7339 break;
7341 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7342 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7343 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7344 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7345 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7346 if (mode == VOIDmode)
7347 mode = TYPE_MODE (boolean_type_node);
7348 if (!target || !register_operand (target, mode))
7349 target = gen_reg_rtx (mode);
7351 mode = get_builtin_sync_mode
7352 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7353 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7354 if (target)
7355 return target;
7356 break;
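/* Illustration of the two flavours, with T standing for the operand
   type of the _N variant used:

     bool __sync_bool_compare_and_swap (T *p, T oldval, T newval);
     T    __sync_val_compare_and_swap  (T *p, T oldval, T newval);

   The boolean form returns whether the swap happened, which is why a
   register in boolean mode is forced for TARGET above; the val form,
   handled below, returns the prior contents of *p.  */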
7358 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7359 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7360 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7361 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7362 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7363 mode = get_builtin_sync_mode
7364 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7365 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7366 if (target)
7367 return target;
7368 break;
7370 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7371 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7372 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7373 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7374 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7375 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7376 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7377 if (target)
7378 return target;
7379 break;
7381 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7382 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7383 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7384 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7385 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7387 expand_builtin_sync_lock_release (mode, exp);
7388 return const0_rtx;
7390 case BUILT_IN_SYNC_SYNCHRONIZE:
7391 expand_builtin_sync_synchronize ();
7392 return const0_rtx;
7394 case BUILT_IN_ATOMIC_EXCHANGE_1:
7395 case BUILT_IN_ATOMIC_EXCHANGE_2:
7396 case BUILT_IN_ATOMIC_EXCHANGE_4:
7397 case BUILT_IN_ATOMIC_EXCHANGE_8:
7398 case BUILT_IN_ATOMIC_EXCHANGE_16:
7399 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7400 target = expand_builtin_atomic_exchange (mode, exp, target);
7401 if (target)
7402 return target;
7403 break;
7405 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7406 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7407 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7408 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7409 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7411 unsigned int nargs, z;
7412 vec<tree, va_gc> *vec;
7414 mode =
7415 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7416 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7417 if (target)
7418 return target;
7420 /* If this is turned into an external library call, the weak parameter
7421 must be dropped to match the expected parameter list. */
7422 nargs = call_expr_nargs (exp);
7423 vec_alloc (vec, nargs - 1);
7424 for (z = 0; z < 3; z++)
7425 vec->quick_push (CALL_EXPR_ARG (exp, z));
7426 /* Skip the boolean weak parameter. */
7427 for (z = 4; z < 6; z++)
7428 vec->quick_push (CALL_EXPR_ARG (exp, z));
7429 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7430 break;
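/* Illustration of the argument surgery above, positions as in the
   loops: of the six arguments

     (ptr, expected, desired, weak, success_order, failure_order)

   index 3 (WEAK) is skipped, so the rebuilt call carries the five
   arguments the out-of-line library routine expects.  */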
7433 case BUILT_IN_ATOMIC_LOAD_1:
7434 case BUILT_IN_ATOMIC_LOAD_2:
7435 case BUILT_IN_ATOMIC_LOAD_4:
7436 case BUILT_IN_ATOMIC_LOAD_8:
7437 case BUILT_IN_ATOMIC_LOAD_16:
7438 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7439 target = expand_builtin_atomic_load (mode, exp, target);
7440 if (target)
7441 return target;
7442 break;
7444 case BUILT_IN_ATOMIC_STORE_1:
7445 case BUILT_IN_ATOMIC_STORE_2:
7446 case BUILT_IN_ATOMIC_STORE_4:
7447 case BUILT_IN_ATOMIC_STORE_8:
7448 case BUILT_IN_ATOMIC_STORE_16:
7449 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7450 target = expand_builtin_atomic_store (mode, exp);
7451 if (target)
7452 return const0_rtx;
7453 break;
7455 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7456 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7457 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7458 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7459 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7461 enum built_in_function lib;
7462 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7463 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7464 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7465 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7466 ignore, lib);
7467 if (target)
7468 return target;
7469 break;
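/* Illustration of the LIB fallback selected above: when no inline
   pattern exists, __atomic_add_fetch_N (p, v, order) can be emitted
   via the corresponding library call __atomic_fetch_add_N, using the
   identity

     add_fetch (p, v) == fetch_add (p, v) + v

   The function-code offset arithmetic pairs each _ADD_FETCH_ size
   with its _FETCH_ADD_ counterpart; the SUB/AND/NAND/XOR/OR cases
   below follow the same pattern.  */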
7471 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7472 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7473 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7474 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7475 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7477 enum built_in_function lib;
7478 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7479 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7480 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7481 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7482 ignore, lib);
7483 if (target)
7484 return target;
7485 break;
7487 case BUILT_IN_ATOMIC_AND_FETCH_1:
7488 case BUILT_IN_ATOMIC_AND_FETCH_2:
7489 case BUILT_IN_ATOMIC_AND_FETCH_4:
7490 case BUILT_IN_ATOMIC_AND_FETCH_8:
7491 case BUILT_IN_ATOMIC_AND_FETCH_16:
7493 enum built_in_function lib;
7494 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7495 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7496 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7497 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7498 ignore, lib);
7499 if (target)
7500 return target;
7501 break;
7503 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7504 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7505 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7506 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7507 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7509 enum built_in_function lib;
7510 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7511 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7512 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7513 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7514 ignore, lib);
7515 if (target)
7516 return target;
7517 break;
7519 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7520 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7521 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7522 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7523 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7525 enum built_in_function lib;
7526 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7527 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7528 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7529 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7530 ignore, lib);
7531 if (target)
7532 return target;
7533 break;
7535 case BUILT_IN_ATOMIC_OR_FETCH_1:
7536 case BUILT_IN_ATOMIC_OR_FETCH_2:
7537 case BUILT_IN_ATOMIC_OR_FETCH_4:
7538 case BUILT_IN_ATOMIC_OR_FETCH_8:
7539 case BUILT_IN_ATOMIC_OR_FETCH_16:
7541 enum built_in_function lib;
7542 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7543 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7544 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7545 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7546 ignore, lib);
7547 if (target)
7548 return target;
7549 break;
7551 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7552 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7553 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7554 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7555 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7556 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7557 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7558 ignore, BUILT_IN_NONE);
7559 if (target)
7560 return target;
7561 break;
7563 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7564 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7565 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7566 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7567 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7568 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7569 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7570 ignore, BUILT_IN_NONE);
7571 if (target)
7572 return target;
7573 break;
7575 case BUILT_IN_ATOMIC_FETCH_AND_1:
7576 case BUILT_IN_ATOMIC_FETCH_AND_2:
7577 case BUILT_IN_ATOMIC_FETCH_AND_4:
7578 case BUILT_IN_ATOMIC_FETCH_AND_8:
7579 case BUILT_IN_ATOMIC_FETCH_AND_16:
7580 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7581 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7582 ignore, BUILT_IN_NONE);
7583 if (target)
7584 return target;
7585 break;
7587 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7588 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7589 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7590 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7591 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7592 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7593 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7594 ignore, BUILT_IN_NONE);
7595 if (target)
7596 return target;
7597 break;
7599 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7600 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7601 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7602 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7603 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7604 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7605 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7606 ignore, BUILT_IN_NONE);
7607 if (target)
7608 return target;
7609 break;
7611 case BUILT_IN_ATOMIC_FETCH_OR_1:
7612 case BUILT_IN_ATOMIC_FETCH_OR_2:
7613 case BUILT_IN_ATOMIC_FETCH_OR_4:
7614 case BUILT_IN_ATOMIC_FETCH_OR_8:
7615 case BUILT_IN_ATOMIC_FETCH_OR_16:
7616 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7617 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7618 ignore, BUILT_IN_NONE);
7619 if (target)
7620 return target;
7621 break;
7623 case BUILT_IN_ATOMIC_TEST_AND_SET:
7624 return expand_builtin_atomic_test_and_set (exp, target);
7626 case BUILT_IN_ATOMIC_CLEAR:
7627 return expand_builtin_atomic_clear (exp);
7629 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7630 return expand_builtin_atomic_always_lock_free (exp);
7632 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7633 target = expand_builtin_atomic_is_lock_free (exp);
7634 if (target)
7635 return target;
7636 break;
7638 case BUILT_IN_ATOMIC_THREAD_FENCE:
7639 expand_builtin_atomic_thread_fence (exp);
7640 return const0_rtx;
7642 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7643 expand_builtin_atomic_signal_fence (exp);
7644 return const0_rtx;
7646 case BUILT_IN_OBJECT_SIZE:
7647 return expand_builtin_object_size (exp);
7649 case BUILT_IN_MEMCPY_CHK:
7650 case BUILT_IN_MEMPCPY_CHK:
7651 case BUILT_IN_MEMMOVE_CHK:
7652 case BUILT_IN_MEMSET_CHK:
7653 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7654 if (target)
7655 return target;
7656 break;
7658 case BUILT_IN_STRCPY_CHK:
7659 case BUILT_IN_STPCPY_CHK:
7660 case BUILT_IN_STRNCPY_CHK:
7661 case BUILT_IN_STPNCPY_CHK:
7662 case BUILT_IN_STRCAT_CHK:
7663 case BUILT_IN_STRNCAT_CHK:
7664 case BUILT_IN_SNPRINTF_CHK:
7665 case BUILT_IN_VSNPRINTF_CHK:
7666 maybe_emit_chk_warning (exp, fcode);
7667 break;
7669 case BUILT_IN_SPRINTF_CHK:
7670 case BUILT_IN_VSPRINTF_CHK:
7671 maybe_emit_sprintf_chk_warning (exp, fcode);
7672 break;
7674 case BUILT_IN_FREE:
7675 if (warn_free_nonheap_object)
7676 maybe_emit_free_warning (exp);
7677 break;
7679 case BUILT_IN_THREAD_POINTER:
7680 return expand_builtin_thread_pointer (exp, target);
7682 case BUILT_IN_SET_THREAD_POINTER:
7683 expand_builtin_set_thread_pointer (exp);
7684 return const0_rtx;
7686 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7687 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7688 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7689 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7690 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7691 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7692 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7693 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7694 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7695 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7696 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7697 /* We allow user CHKP builtins if Pointer Bounds
7698 Checker is off. */
7699 if (!chkp_function_instrumented_p (current_function_decl))
7701 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7702 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7703 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7704 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7705 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7706 return expand_normal (CALL_EXPR_ARG (exp, 0));
7707 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7708 return expand_normal (size_zero_node);
7709 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7710 return expand_normal (size_int (-1));
7711 else
7712 return const0_rtx;
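/* Illustration with instrumentation off, matching the cases just
   above: a user call __builtin___bnd_set_ptr_bounds (p, n) simply
   yields P, __builtin___bnd_get_ptr_lbound (p) yields 0, and
   __builtin___bnd_get_ptr_ubound (p) yields (size_t) -1.  */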
7714 /* FALLTHROUGH */
7716 case BUILT_IN_CHKP_BNDMK:
7717 case BUILT_IN_CHKP_BNDSTX:
7718 case BUILT_IN_CHKP_BNDCL:
7719 case BUILT_IN_CHKP_BNDCU:
7720 case BUILT_IN_CHKP_BNDLDX:
7721 case BUILT_IN_CHKP_BNDRET:
7722 case BUILT_IN_CHKP_INTERSECT:
7723 case BUILT_IN_CHKP_NARROW:
7724 case BUILT_IN_CHKP_EXTRACT_LOWER:
7725 case BUILT_IN_CHKP_EXTRACT_UPPER:
7726 /* The software implementation of Pointer Bounds Checker is not yet
7727 available.  Target support is required. */
7728 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7729 break;
7731 case BUILT_IN_ACC_ON_DEVICE:
7732 /* Fall back to a library call if we failed to expand the builtin
7733 when folding. */
7734 break;
7736 default: /* Just emit a library call for an unknown builtin. */
7737 break;
7740 /* The switch statement above can drop through to cause the function
7741 to be called normally. */
7742 return expand_call (exp, target, ignore);
7745 /* Similar to expand_builtin but is used for instrumented calls. */
7747 rtx
7748 expand_builtin_with_bounds (tree exp, rtx target,
7749 rtx subtarget ATTRIBUTE_UNUSED,
7750 machine_mode mode, int ignore)
7752 tree fndecl = get_callee_fndecl (exp);
7753 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7755 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7757 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7758 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7760 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7761 && fcode < END_CHKP_BUILTINS);
7763 switch (fcode)
7765 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7766 target = expand_builtin_memcpy_with_bounds (exp, target);
7767 if (target)
7768 return target;
7769 break;
7771 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7772 target = expand_builtin_mempcpy_with_bounds (exp, target);
7773 if (target)
7774 return target;
7775 break;
7777 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7778 target = expand_builtin_memset_with_bounds (exp, target, mode);
7779 if (target)
7780 return target;
7781 break;
7783 case BUILT_IN_MEMCPY_CHKP:
7784 case BUILT_IN_MEMMOVE_CHKP:
7785 case BUILT_IN_MEMPCPY_CHKP:
7786 if (call_expr_nargs (exp) > 3)
7788 /* memcpy_chkp (void *dst, size_t dstbnd,
7789 const void *src, size_t srcbnd, size_t n)
7790 and others take a pointer bound argument just after each
7791 pointer argument. */
7792 tree dest = CALL_EXPR_ARG (exp, 0);
7793 tree src = CALL_EXPR_ARG (exp, 2);
7794 tree len = CALL_EXPR_ARG (exp, 4);
7796 check_memop_access (exp, dest, src, len);
7797 break;
7800 default:
7801 break;
7804 /* The switch statement above can drop through to cause the function
7805 to be called normally. */
7806 return expand_call (exp, target, ignore);
7809 /* Determine whether a tree node represents a call to a built-in
7810 function. If the tree T is a call to a built-in function with
7811 the right number of arguments of the appropriate types, return
7812 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7813 Otherwise the return value is END_BUILTINS. */
7815 enum built_in_function
7816 builtin_mathfn_code (const_tree t)
7818 const_tree fndecl, arg, parmlist;
7819 const_tree argtype, parmtype;
7820 const_call_expr_arg_iterator iter;
7822 if (TREE_CODE (t) != CALL_EXPR
7823 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7824 return END_BUILTINS;
7826 fndecl = get_callee_fndecl (t);
7827 if (fndecl == NULL_TREE
7828 || TREE_CODE (fndecl) != FUNCTION_DECL
7829 || ! DECL_BUILT_IN (fndecl)
7830 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7831 return END_BUILTINS;
7833 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7834 init_const_call_expr_arg_iterator (t, &iter);
7835 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7837 /* If a function doesn't take a variable number of arguments,
7838 the last element in the list will have type `void'. */
7839 parmtype = TREE_VALUE (parmlist);
7840 if (VOID_TYPE_P (parmtype))
7842 if (more_const_call_expr_args_p (&iter))
7843 return END_BUILTINS;
7844 return DECL_FUNCTION_CODE (fndecl);
7847 if (! more_const_call_expr_args_p (&iter))
7848 return END_BUILTINS;
7850 arg = next_const_call_expr_arg (&iter);
7851 argtype = TREE_TYPE (arg);
7853 if (SCALAR_FLOAT_TYPE_P (parmtype))
7855 if (! SCALAR_FLOAT_TYPE_P (argtype))
7856 return END_BUILTINS;
7858 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7860 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7861 return END_BUILTINS;
7863 else if (POINTER_TYPE_P (parmtype))
7865 if (! POINTER_TYPE_P (argtype))
7866 return END_BUILTINS;
7868 else if (INTEGRAL_TYPE_P (parmtype))
7870 if (! INTEGRAL_TYPE_P (argtype))
7871 return END_BUILTINS;
7873 else
7874 return END_BUILTINS;
7877 /* Variable-length argument list. */
7878 return DECL_FUNCTION_CODE (fndecl);
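/* Usage illustration: for a well-formed call tree representing
   sqrt (x) with X of type double, builtin_mathfn_code returns
   BUILT_IN_SQRT; if X were instead, say, a pointer, the
   argument/parameter walk above would yield END_BUILTINS.  */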
7881 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7882 evaluate to a constant. */
7884 static tree
7885 fold_builtin_constant_p (tree arg)
7887 /* We return 1 for a numeric type that's known to be a constant
7888 value at compile-time or for an aggregate type that's a
7889 literal constant. */
7890 STRIP_NOPS (arg);
7892 /* If we know this is a constant, return constant one. */
7893 if (CONSTANT_CLASS_P (arg)
7894 || (TREE_CODE (arg) == CONSTRUCTOR
7895 && TREE_CONSTANT (arg)))
7896 return integer_one_node;
7897 if (TREE_CODE (arg) == ADDR_EXPR)
7899 tree op = TREE_OPERAND (arg, 0);
7900 if (TREE_CODE (op) == STRING_CST
7901 || (TREE_CODE (op) == ARRAY_REF
7902 && integer_zerop (TREE_OPERAND (op, 1))
7903 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7904 return integer_one_node;
7907 /* If this expression has side effects, show we don't know it to be a
7908 constant. Likewise if it's a pointer or aggregate type since in
7909 those cases we only want literals, since those are only optimized
7910 when generating RTL, not later.
7911 And finally, if we are compiling an initializer, not code, we
7912 need to return a definite result now; there's not going to be any
7913 more optimization done. */
7914 if (TREE_SIDE_EFFECTS (arg)
7915 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7916 || POINTER_TYPE_P (TREE_TYPE (arg))
7917 || cfun == 0
7918 || folding_initializer
7919 || force_folding_builtin_constant_p)
7920 return integer_zero_node;
7922 return NULL_TREE;
7925 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7926 return it as a truthvalue. */
7928 static tree
7929 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7930 tree predictor)
7932 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7934 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7935 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7936 ret_type = TREE_TYPE (TREE_TYPE (fn));
7937 pred_type = TREE_VALUE (arg_types);
7938 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7940 pred = fold_convert_loc (loc, pred_type, pred);
7941 expected = fold_convert_loc (loc, expected_type, expected);
7942 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7943 predictor);
7945 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7946 build_int_cst (ret_type, 0));
7949 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7950 NULL_TREE if no simplification is possible. */
7952 tree
7953 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7955 tree inner, fndecl, inner_arg0;
7956 enum tree_code code;
7958 /* Distribute the expected value over short-circuiting operators.
7959 See through the cast from truthvalue_type_node to long. */
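/* E.g. __builtin_expect (a && b, 1) is rewritten below as roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   with each half built by build_builtin_expect_predicate.  */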
7960 inner_arg0 = arg0;
7961 while (CONVERT_EXPR_P (inner_arg0)
7962 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7963 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7964 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7966 /* If this is a builtin_expect within a builtin_expect, keep the
7967 inner one.  See through a comparison against a constant.  It
7968 might have been added to create a truthvalue. */
7969 inner = inner_arg0;
7971 if (COMPARISON_CLASS_P (inner)
7972 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7973 inner = TREE_OPERAND (inner, 0);
7975 if (TREE_CODE (inner) == CALL_EXPR
7976 && (fndecl = get_callee_fndecl (inner))
7977 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7978 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7979 return arg0;
7981 inner = inner_arg0;
7982 code = TREE_CODE (inner);
7983 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7985 tree op0 = TREE_OPERAND (inner, 0);
7986 tree op1 = TREE_OPERAND (inner, 1);
7988 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7989 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7990 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7992 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7995 /* If the argument isn't invariant then there's nothing else we can do. */
7996 if (!TREE_CONSTANT (inner_arg0))
7997 return NULL_TREE;
7999 /* If we expect that a comparison against the argument will fold to
8000 a constant return the constant. In practice, this means a true
8001 constant or the address of a non-weak symbol. */
8002 inner = inner_arg0;
8003 STRIP_NOPS (inner);
8004 if (TREE_CODE (inner) == ADDR_EXPR)
8006 do
8008 inner = TREE_OPERAND (inner, 0);
8010 while (TREE_CODE (inner) == COMPONENT_REF
8011 || TREE_CODE (inner) == ARRAY_REF);
8012 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8013 return NULL_TREE;
8016 /* Otherwise, ARG0 already has the proper type for the return value. */
8017 return arg0;
8020 /* Fold a call to __builtin_classify_type with argument ARG. */
8022 static tree
8023 fold_builtin_classify_type (tree arg)
8025 if (arg == 0)
8026 return build_int_cst (integer_type_node, no_type_class);
8028 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8031 /* Fold a call to __builtin_strlen with argument ARG. */
8033 static tree
8034 fold_builtin_strlen (location_t loc, tree type, tree arg)
8036 if (!validate_arg (arg, POINTER_TYPE))
8037 return NULL_TREE;
8038 else
8040 tree len = c_strlen (arg, 0);
8042 if (len)
8043 return fold_convert_loc (loc, type, len);
8045 return NULL_TREE;
8049 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8051 static tree
8052 fold_builtin_inf (location_t loc, tree type, int warn)
8054 REAL_VALUE_TYPE real;
8056 /* __builtin_inff is intended to be usable to define INFINITY on all
8057 targets. If an infinity is not available, INFINITY expands "to a
8058 positive constant of type float that overflows at translation
8059 time", footnote "In this case, using INFINITY will violate the
8060 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8061 Thus we pedwarn to ensure this constraint violation is
8062 diagnosed. */
8063 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8064 pedwarn (loc, 0, "target format does not support infinity");
8066 real_inf (&real);
8067 return build_real (type, real);
8070 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8071 NULL_TREE if no simplification can be made. */
8073 static tree
8074 fold_builtin_sincos (location_t loc,
8075 tree arg0, tree arg1, tree arg2)
8077 tree type;
8078 tree fndecl, call = NULL_TREE;
8080 if (!validate_arg (arg0, REAL_TYPE)
8081 || !validate_arg (arg1, POINTER_TYPE)
8082 || !validate_arg (arg2, POINTER_TYPE))
8083 return NULL_TREE;
8085 type = TREE_TYPE (arg0);
8087 /* Canonicalize sincos to cexpi. */
8088 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8089 if (fn == END_BUILTINS)
8090 return NULL_TREE;
8092 /* Calculate the result when the argument is a constant. */
8093 if (TREE_CODE (arg0) == REAL_CST)
8095 tree complex_type = build_complex_type (type);
8096 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8098 if (!call)
8100 if (!targetm.libc_has_function (function_c99_math_complex)
8101 || !builtin_decl_implicit_p (fn))
8102 return NULL_TREE;
8103 fndecl = builtin_decl_explicit (fn);
8104 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8105 call = builtin_save_expr (call);
8108 return build2 (COMPOUND_EXPR, void_type_node,
8109 build2 (MODIFY_EXPR, void_type_node,
8110 build_fold_indirect_ref_loc (loc, arg1),
8111 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8112 build2 (MODIFY_EXPR, void_type_node,
8113 build_fold_indirect_ref_loc (loc, arg2),
8114 fold_build1_loc (loc, REALPART_EXPR, type, call)));
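/* Illustration of the tree built above: sincos (x, &s, &c) behaves as
   if it were

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t, c = __real__ t;

   where the cexpi call may itself fold to a constant when X is a
   REAL_CST.  */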
8117 /* Fold a call to builtin memcmp with arguments ARG1, ARG2 and LEN.
8118 Return NULL_TREE if no simplification can be made. */
8120 static tree
8121 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8123 if (!validate_arg (arg1, POINTER_TYPE)
8124 || !validate_arg (arg2, POINTER_TYPE)
8125 || !validate_arg (len, INTEGER_TYPE))
8126 return NULL_TREE;
8128 /* If the LEN parameter is zero, return zero. */
8129 if (integer_zerop (len))
8130 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8131 arg1, arg2);
8133 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8134 if (operand_equal_p (arg1, arg2, 0))
8135 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8137 /* If len parameter is one, return an expression corresponding to
8138 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8139 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8141 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8142 tree cst_uchar_ptr_node
8143 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8145 tree ind1
8146 = fold_convert_loc (loc, integer_type_node,
8147 build1 (INDIRECT_REF, cst_uchar_node,
8148 fold_convert_loc (loc,
8149 cst_uchar_ptr_node,
8150 arg1)));
8151 tree ind2
8152 = fold_convert_loc (loc, integer_type_node,
8153 build1 (INDIRECT_REF, cst_uchar_node,
8154 fold_convert_loc (loc,
8155 cst_uchar_ptr_node,
8156 arg2)));
8157 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8160 return NULL_TREE;
8163 /* Fold a call to builtin isascii with argument ARG. */
8165 static tree
8166 fold_builtin_isascii (location_t loc, tree arg)
8168 if (!validate_arg (arg, INTEGER_TYPE))
8169 return NULL_TREE;
8170 else
8172 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8173 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8174 build_int_cst (integer_type_node,
8175 ~ (unsigned HOST_WIDE_INT) 0x7f));
8176 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8177 arg, integer_zero_node);
8181 /* Fold a call to builtin toascii with argument ARG. */
8183 static tree
8184 fold_builtin_toascii (location_t loc, tree arg)
8186 if (!validate_arg (arg, INTEGER_TYPE))
8187 return NULL_TREE;
8189 /* Transform toascii(c) -> (c & 0x7f). */
8190 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8191 build_int_cst (integer_type_node, 0x7f));
8194 /* Fold a call to builtin isdigit with argument ARG. */
8196 static tree
8197 fold_builtin_isdigit (location_t loc, tree arg)
8199 if (!validate_arg (arg, INTEGER_TYPE))
8200 return NULL_TREE;
8201 else
8203 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8204 /* According to the C standard, isdigit is unaffected by locale.
8205 However, it definitely is affected by the target character set. */
8206 unsigned HOST_WIDE_INT target_digit0
8207 = lang_hooks.to_target_charset ('0');
8209 if (target_digit0 == 0)
8210 return NULL_TREE;
8212 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8213 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8214 build_int_cst (unsigned_type_node, target_digit0));
8215 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8216 build_int_cst (unsigned_type_node, 9));
8220 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8222 static tree
8223 fold_builtin_fabs (location_t loc, tree arg, tree type)
8225 if (!validate_arg (arg, REAL_TYPE))
8226 return NULL_TREE;
8228 arg = fold_convert_loc (loc, type, arg);
8229 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8232 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8234 static tree
8235 fold_builtin_abs (location_t loc, tree arg, tree type)
8237 if (!validate_arg (arg, INTEGER_TYPE))
8238 return NULL_TREE;
8240 arg = fold_convert_loc (loc, type, arg);
8241 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8244 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8246 static tree
8247 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8249 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8250 if (validate_arg (arg0, REAL_TYPE)
8251 && validate_arg (arg1, REAL_TYPE)
8252 && validate_arg (arg2, REAL_TYPE)
8253 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8254 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8256 return NULL_TREE;
8259 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8261 static tree
8262 fold_builtin_carg (location_t loc, tree arg, tree type)
8264 if (validate_arg (arg, COMPLEX_TYPE)
8265 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8267 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8269 if (atan2_fn)
8271 tree new_arg = builtin_save_expr (arg);
8272 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8273 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8274 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8278 return NULL_TREE;
8281 /* Fold a call to builtin frexp; we can assume the base is 2. */
8283 static tree
8284 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8286 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8287 return NULL_TREE;
8289 STRIP_NOPS (arg0);
8291 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8292 return NULL_TREE;
8294 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8296 /* Proceed if a valid pointer type was passed in. */
8297 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8299 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8300 tree frac, exp;
8302 switch (value->cl)
8304 case rvc_zero:
8305 /* For +-0, return (*exp = 0, +-0). */
8306 exp = integer_zero_node;
8307 frac = arg0;
8308 break;
8309 case rvc_nan:
8310 case rvc_inf:
8311 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8312 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8313 case rvc_normal:
8315 /* Since the frexp function always expects base 2, and in
8316 GCC normalized significands are already in the range
8317 [0.5, 1.0), we have exactly what frexp wants. */
8318 REAL_VALUE_TYPE frac_rvt = *value;
8319 SET_REAL_EXP (&frac_rvt, 0);
8320 frac = build_real (rettype, frac_rvt);
8321 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8323 break;
8324 default:
8325 gcc_unreachable ();
8328 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8329 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8330 TREE_SIDE_EFFECTS (arg1) = 1;
8331 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8334 return NULL_TREE;
8337 /* Fold a call to builtin modf. */
8339 static tree
8340 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8342 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8343 return NULL_TREE;
8345 STRIP_NOPS (arg0);
8347 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8348 return NULL_TREE;
8350 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8352 /* Proceed if a valid pointer type was passed in. */
8353 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8355 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8356 REAL_VALUE_TYPE trunc, frac;
8358 switch (value->cl)
8360 case rvc_nan:
8361 case rvc_zero:
8362 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8363 trunc = frac = *value;
8364 break;
8365 case rvc_inf:
8366 /* For +-Inf, return (*arg1 = arg0, +-0). */
8367 frac = dconst0;
8368 frac.sign = value->sign;
8369 trunc = *value;
8370 break;
8371 case rvc_normal:
8372 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8373 real_trunc (&trunc, VOIDmode, value);
8374 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8375 /* If the original number was negative and already
8376 integral, then the fractional part is -0.0. */
8377 if (value->sign && frac.cl == rvc_zero)
8378 frac.sign = value->sign;
8379 break;
8382 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8383 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8384 build_real (rettype, trunc));
8385 TREE_SIDE_EFFECTS (arg1) = 1;
8386 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8387 build_real (rettype, frac));
8390 return NULL_TREE;
8393 /* Given a location LOC, an interclass builtin function decl FNDECL
8394 and its single argument ARG, return a folded expression computing
8395 the same, or NULL_TREE if we either couldn't or didn't want to fold
8396 (the latter happens if there's an RTL instruction available). */
8398 static tree
8399 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8401 machine_mode mode;
8403 if (!validate_arg (arg, REAL_TYPE))
8404 return NULL_TREE;
8406 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8407 return NULL_TREE;
8409 mode = TYPE_MODE (TREE_TYPE (arg));
8411 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8413 /* If there is no optab, try generic code. */
8414 switch (DECL_FUNCTION_CODE (fndecl))
8416 tree result;
8418 CASE_FLT_FN (BUILT_IN_ISINF):
8420 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8421 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8422 tree type = TREE_TYPE (arg);
8423 REAL_VALUE_TYPE r;
8424 char buf[128];
8426 if (is_ibm_extended)
8428 /* NaN and Inf are encoded in the high-order double value
8429 only. The low-order value is not significant. */
8430 type = double_type_node;
8431 mode = DFmode;
8432 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8434 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8435 real_from_string (&r, buf);
8436 result = build_call_expr (isgr_fn, 2,
8437 fold_build1_loc (loc, ABS_EXPR, type, arg),
8438 build_real (type, r));
8439 return result;
8441 CASE_FLT_FN (BUILT_IN_FINITE):
8442 case BUILT_IN_ISFINITE:
8444 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8445 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8446 tree type = TREE_TYPE (arg);
8447 REAL_VALUE_TYPE r;
8448 char buf[128];
8450 if (is_ibm_extended)
8452 /* NaN and Inf are encoded in the high-order double value
8453 only. The low-order value is not significant. */
8454 type = double_type_node;
8455 mode = DFmode;
8456 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8458 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8459 real_from_string (&r, buf);
8460 result = build_call_expr (isle_fn, 2,
8461 fold_build1_loc (loc, ABS_EXPR, type, arg),
8462 build_real (type, r));
8463 /*result = fold_build2_loc (loc, UNGT_EXPR,
8464 TREE_TYPE (TREE_TYPE (fndecl)),
8465 fold_build1_loc (loc, ABS_EXPR, type, arg),
8466 build_real (type, r));
8467 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8468 TREE_TYPE (TREE_TYPE (fndecl)),
8469 result);*/
8470 return result;
8472 case BUILT_IN_ISNORMAL:
8474 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8475 islessequal(fabs(x),DBL_MAX). */
8476 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8477 tree type = TREE_TYPE (arg);
8478 tree orig_arg, max_exp, min_exp;
8479 machine_mode orig_mode = mode;
8480 REAL_VALUE_TYPE rmax, rmin;
8481 char buf[128];
8483 orig_arg = arg = builtin_save_expr (arg);
8484 if (is_ibm_extended)
8486 /* Use double to test the normal range of IBM extended
8487 precision. Emin for IBM extended precision is
8488 different from emin for IEEE double, being 53 higher
8489 since the low double exponent is at least 53 lower
8490 than the high double exponent. */
8491 type = double_type_node;
8492 mode = DFmode;
8493 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8495 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8497 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8498 real_from_string (&rmax, buf);
8499 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8500 real_from_string (&rmin, buf);
8501 max_exp = build_real (type, rmax);
8502 min_exp = build_real (type, rmin);
8504 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8505 if (is_ibm_extended)
8507 /* Testing the high end of the range is done using just
8508 the high double, with the same test as isfinite().
8509 For the subnormal end of the range we first test the
8510 high double, then if its magnitude is equal to the
8511 limit of 0x1p-969, we test whether the low double is
8512 non-zero and opposite sign to the high double. */
8513 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8514 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8515 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8516 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8517 arg, min_exp);
8518 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8519 complex_double_type_node, orig_arg);
8520 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8521 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8522 tree zero = build_real (type, dconst0);
8523 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8524 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8525 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8526 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8527 fold_build3 (COND_EXPR,
8528 integer_type_node,
8529 hilt, logt, lolt));
8530 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8531 eq_min, ok_lo);
8532 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8533 gt_min, eq_min);
8535 else
8537 tree const isge_fn
8538 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8539 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8541 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8542 max_exp, min_exp);
8543 return result;
8545 default:
8546 break;
8549 return NULL_TREE;
8552 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8553 ARG is the argument for the call. */
8555 static tree
8556 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8558 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8560 if (!validate_arg (arg, REAL_TYPE))
8561 return NULL_TREE;
8563 switch (builtin_index)
8565 case BUILT_IN_ISINF:
8566 if (!HONOR_INFINITIES (arg))
8567 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8569 return NULL_TREE;
8571 case BUILT_IN_ISINF_SIGN:
8573 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8574 /* In a boolean context, GCC will fold the inner COND_EXPR to
8575 1. So e.g. "if (isinf_sign(x))" would be folded to just
8576 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8577 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8578 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8579 tree tmp = NULL_TREE;
8581 arg = builtin_save_expr (arg);
8583 if (signbit_fn && isinf_fn)
8585 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8586 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8588 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8589 signbit_call, integer_zero_node);
8590 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8591 isinf_call, integer_zero_node);
8593 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8594 integer_minus_one_node, integer_one_node);
8595 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8596 isinf_call, tmp,
8597 integer_zero_node);
8600 return tmp;
8603 case BUILT_IN_ISFINITE:
8604 if (!HONOR_NANS (arg)
8605 && !HONOR_INFINITIES (arg))
8606 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8608 return NULL_TREE;
8610 case BUILT_IN_ISNAN:
8611 if (!HONOR_NANS (arg))
8612 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8615 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8616 if (is_ibm_extended)
8618 /* NaN and Inf are encoded in the high-order double value
8619 only. The low-order value is not significant. */
8620 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8623 arg = builtin_save_expr (arg);
8624 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8626 default:
8627 gcc_unreachable ();
8631 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8632 This builtin will generate code to return the appropriate floating
8633 point classification depending on the value of the floating point
8634 number passed in. The possible return values must be supplied as
8635 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8636 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8637 one floating-point argument, which is "type generic". */
8639 static tree
8640 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8642 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8643 arg, type, res, tmp;
8644 machine_mode mode;
8645 REAL_VALUE_TYPE r;
8646 char buf[128];
8648 /* Verify the required arguments in the original call. */
8649 if (nargs != 6
8650 || !validate_arg (args[0], INTEGER_TYPE)
8651 || !validate_arg (args[1], INTEGER_TYPE)
8652 || !validate_arg (args[2], INTEGER_TYPE)
8653 || !validate_arg (args[3], INTEGER_TYPE)
8654 || !validate_arg (args[4], INTEGER_TYPE)
8655 || !validate_arg (args[5], REAL_TYPE))
8656 return NULL_TREE;
8658 fp_nan = args[0];
8659 fp_infinite = args[1];
8660 fp_normal = args[2];
8661 fp_subnormal = args[3];
8662 fp_zero = args[4];
8663 arg = args[5];
8664 type = TREE_TYPE (arg);
8665 mode = TYPE_MODE (type);
8666 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8668 /* fpclassify(x) ->
8669 isnan(x) ? FP_NAN :
8670 (fabs(x) == Inf ? FP_INFINITE :
8671 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8672 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8674 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8675 build_real (type, dconst0));
8676 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8677 tmp, fp_zero, fp_subnormal);
8679 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8680 real_from_string (&r, buf);
8681 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8682 arg, build_real (type, r));
8683 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8685 if (HONOR_INFINITIES (mode))
8687 real_inf (&r);
8688 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8689 build_real (type, r));
8690 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8691 fp_infinite, res);
8694 if (HONOR_NANS (mode))
8696 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8697 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8700 return res;
8703 /* Fold a call to an unordered comparison function such as
8704 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8705 being called and ARG0 and ARG1 are the arguments for the call.
8706 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8707 the opposite of the desired result. UNORDERED_CODE is used
8708 for modes that can hold NaNs and ORDERED_CODE is used for
8709 the rest. */
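/* Illustration: with NaNs honored, __builtin_isgreater (x, y) is
   folded below as roughly

     !(x <= y)    /* with UNLE_EXPR, which is true for unordered */

   so the result is false, rather than undefined, when either operand
   is a NaN.  */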
8711 static tree
8712 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8713 enum tree_code unordered_code,
8714 enum tree_code ordered_code)
8716 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8717 enum tree_code code;
8718 tree type0, type1;
8719 enum tree_code code0, code1;
8720 tree cmp_type = NULL_TREE;
8722 type0 = TREE_TYPE (arg0);
8723 type1 = TREE_TYPE (arg1);
8725 code0 = TREE_CODE (type0);
8726 code1 = TREE_CODE (type1);
8728 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8729 /* Choose the wider of two real types. */
8730 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8731 ? type0 : type1;
8732 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8733 cmp_type = type0;
8734 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8735 cmp_type = type1;
8737 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8738 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8740 if (unordered_code == UNORDERED_EXPR)
8742 if (!HONOR_NANS (arg0))
8743 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8744 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8747 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8748 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8749 fold_build2_loc (loc, code, type, arg0, arg1));
8752 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8753 arithmetic if it can never overflow, or into internal functions that
8754 return both the result of the arithmetic and a boolean overflow flag in
8755 a complex integer result, or some other check for overflow.
8756 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8757 checking part of that. */
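/* Illustration: __builtin_add_overflow (a, b, &r) is folded below to
   roughly

     c = .ADD_OVERFLOW (a, b);   /* internal fn, complex result */
     r = __real__ c;
     (bool) __imag__ c;          /* value of the whole expression */

   while __builtin_add_overflow_p keeps only the flag part.  */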
8759 static tree
8760 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8761 tree arg0, tree arg1, tree arg2)
8763 enum internal_fn ifn = IFN_LAST;
8764 /* The code of the expression corresponding to the type-generic
8765 built-in, or ERROR_MARK for the type-specific ones. */
8766 enum tree_code opcode = ERROR_MARK;
8767 bool ovf_only = false;
8769 switch (fcode)
8771 case BUILT_IN_ADD_OVERFLOW_P:
8772 ovf_only = true;
8773 /* FALLTHRU */
8774 case BUILT_IN_ADD_OVERFLOW:
8775 opcode = PLUS_EXPR;
8776 /* FALLTHRU */
8777 case BUILT_IN_SADD_OVERFLOW:
8778 case BUILT_IN_SADDL_OVERFLOW:
8779 case BUILT_IN_SADDLL_OVERFLOW:
8780 case BUILT_IN_UADD_OVERFLOW:
8781 case BUILT_IN_UADDL_OVERFLOW:
8782 case BUILT_IN_UADDLL_OVERFLOW:
8783 ifn = IFN_ADD_OVERFLOW;
8784 break;
8785 case BUILT_IN_SUB_OVERFLOW_P:
8786 ovf_only = true;
8787 /* FALLTHRU */
8788 case BUILT_IN_SUB_OVERFLOW:
8789 opcode = MINUS_EXPR;
8790 /* FALLTHRU */
8791 case BUILT_IN_SSUB_OVERFLOW:
8792 case BUILT_IN_SSUBL_OVERFLOW:
8793 case BUILT_IN_SSUBLL_OVERFLOW:
8794 case BUILT_IN_USUB_OVERFLOW:
8795 case BUILT_IN_USUBL_OVERFLOW:
8796 case BUILT_IN_USUBLL_OVERFLOW:
8797 ifn = IFN_SUB_OVERFLOW;
8798 break;
8799 case BUILT_IN_MUL_OVERFLOW_P:
8800 ovf_only = true;
8801 /* FALLTHRU */
8802 case BUILT_IN_MUL_OVERFLOW:
8803 opcode = MULT_EXPR;
8804 /* FALLTHRU */
8805 case BUILT_IN_SMUL_OVERFLOW:
8806 case BUILT_IN_SMULL_OVERFLOW:
8807 case BUILT_IN_SMULLL_OVERFLOW:
8808 case BUILT_IN_UMUL_OVERFLOW:
8809 case BUILT_IN_UMULL_OVERFLOW:
8810 case BUILT_IN_UMULLL_OVERFLOW:
8811 ifn = IFN_MUL_OVERFLOW;
8812 break;
8813 default:
8814 gcc_unreachable ();
8817 /* For the "generic" overloads, the first two arguments can have different
8818 types and the last argument determines the target type to use to check
8819 for overflow. The arguments of the other overloads all have the same
8820 type. */
8821 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8823 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8824 arguments are constant, attempt to fold the built-in call into a constant
8825 expression indicating whether or not it detected an overflow. */
8826 if (ovf_only
8827 && TREE_CODE (arg0) == INTEGER_CST
8828 && TREE_CODE (arg1) == INTEGER_CST)
8829 /* Perform the computation in the target type and check for overflow. */
8830 return omit_one_operand_loc (loc, boolean_type_node,
8831 arith_overflowed_p (opcode, type, arg0, arg1)
8832 ? boolean_true_node : boolean_false_node,
8833 arg2);
8835 tree ctype = build_complex_type (type);
8836 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8837 2, arg0, arg1);
8838 tree tgt = save_expr (call);
8839 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8840 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8841 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8843 if (ovf_only)
8844 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8846 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8847 tree store
8848 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8849 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
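
/* Illustrative usage sketch (not part of GCC itself): how the type-generic
   overflow builtins behave after the folding above.  */
#if 0
#include <stdbool.h>
#include <limits.h>

static bool
checked_add (int a, int b, int *res)
{
  /* Folded into IFN_ADD_OVERFLOW: a complex int whose real part is the
     sum and whose imaginary part is the overflow flag; *res receives the
     real part and the flag is returned.  */
  return __builtin_add_overflow (a, b, res);
}

static bool
add_would_overflow (int a, int b)
{
  /* The _p variant only needs the flag; with constant operands it folds
     to a constant, e.g. __builtin_add_overflow_p (INT_MAX, 1, 0) is 1.  */
  return __builtin_add_overflow_p (a, b, 0);
}
#endif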
8852 /* Fold a call to __builtin_FILE to a constant string. */
8854 static inline tree
8855 fold_builtin_FILE (location_t loc)
8857 if (const char *fname = LOCATION_FILE (loc))
8858 return build_string_literal (strlen (fname) + 1, fname);
8860 return build_string_literal (1, "");
8863 /* Fold a call to __builtin_FUNCTION to a constant string. */
8865 static inline tree
8866 fold_builtin_FUNCTION ()
8868 const char *name = "";
8870 if (current_function_decl)
8871 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8873 return build_string_literal (strlen (name) + 1, name);
8876 /* Fold a call to __builtin_LINE to an integer constant. */
8878 static inline tree
8879 fold_builtin_LINE (location_t loc, tree type)
8881 return build_int_cst (type, LOCATION_LINE (loc));
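
/* Illustrative usage sketch (not part of GCC): each of the three builtins
   above folds to a constant describing the expansion site, which makes
   them useful in logging macros.  The do_log helper is hypothetical.  */
#if 0
extern void do_log (const char *file, const char *func, int line);

#define LOG() do_log (__builtin_FILE (), __builtin_FUNCTION (), \
		      __builtin_LINE ())
#endif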
8884 /* Fold a call to built-in function FNDECL with 0 arguments.
8885 This function returns NULL_TREE if no simplification was possible. */
8887 static tree
8888 fold_builtin_0 (location_t loc, tree fndecl)
8890 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8891 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8892 switch (fcode)
8894 case BUILT_IN_FILE:
8895 return fold_builtin_FILE (loc);
8897 case BUILT_IN_FUNCTION:
8898 return fold_builtin_FUNCTION ();
8900 case BUILT_IN_LINE:
8901 return fold_builtin_LINE (loc, type);
8903 CASE_FLT_FN (BUILT_IN_INF):
8904 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8905 case BUILT_IN_INFD32:
8906 case BUILT_IN_INFD64:
8907 case BUILT_IN_INFD128:
8908 return fold_builtin_inf (loc, type, true);
8910 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8911 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8912 return fold_builtin_inf (loc, type, false);
8914 case BUILT_IN_CLASSIFY_TYPE:
8915 return fold_builtin_classify_type (NULL_TREE);
8917 default:
8918 break;
8920 return NULL_TREE;
8923 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8924 This function returns NULL_TREE if no simplification was possible. */
8926 static tree
8927 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8929 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8930 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8932 if (TREE_CODE (arg0) == ERROR_MARK)
8933 return NULL_TREE;
8935 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8936 return ret;
8938 switch (fcode)
8940 case BUILT_IN_CONSTANT_P:
8942 tree val = fold_builtin_constant_p (arg0);
8944 /* Gimplification will pull the CALL_EXPR for the builtin out of
8945 an if condition. When not optimizing, we'll not CSE it back.
8946    To avoid regressions such as link errors, return false now. */
8947 if (!val && !optimize)
8948 val = integer_zero_node;
8950 return val;
8953 case BUILT_IN_CLASSIFY_TYPE:
8954 return fold_builtin_classify_type (arg0);
8956 case BUILT_IN_STRLEN:
8957 return fold_builtin_strlen (loc, type, arg0);
8959 CASE_FLT_FN (BUILT_IN_FABS):
8960 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8961 case BUILT_IN_FABSD32:
8962 case BUILT_IN_FABSD64:
8963 case BUILT_IN_FABSD128:
8964 return fold_builtin_fabs (loc, arg0, type);
8966 case BUILT_IN_ABS:
8967 case BUILT_IN_LABS:
8968 case BUILT_IN_LLABS:
8969 case BUILT_IN_IMAXABS:
8970 return fold_builtin_abs (loc, arg0, type);
8972 CASE_FLT_FN (BUILT_IN_CONJ):
8973 if (validate_arg (arg0, COMPLEX_TYPE)
8974 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8975 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8976 break;
8978 CASE_FLT_FN (BUILT_IN_CREAL):
8979 if (validate_arg (arg0, COMPLEX_TYPE)
8980 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8981 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8982 break;
8984 CASE_FLT_FN (BUILT_IN_CIMAG):
8985 if (validate_arg (arg0, COMPLEX_TYPE)
8986 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8987 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8988 break;
8990 CASE_FLT_FN (BUILT_IN_CARG):
8991 return fold_builtin_carg (loc, arg0, type);
8993 case BUILT_IN_ISASCII:
8994 return fold_builtin_isascii (loc, arg0);
8996 case BUILT_IN_TOASCII:
8997 return fold_builtin_toascii (loc, arg0);
8999 case BUILT_IN_ISDIGIT:
9000 return fold_builtin_isdigit (loc, arg0);
9002 CASE_FLT_FN (BUILT_IN_FINITE):
9003 case BUILT_IN_FINITED32:
9004 case BUILT_IN_FINITED64:
9005 case BUILT_IN_FINITED128:
9006 case BUILT_IN_ISFINITE:
9008 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9009 if (ret)
9010 return ret;
9011 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9014 CASE_FLT_FN (BUILT_IN_ISINF):
9015 case BUILT_IN_ISINFD32:
9016 case BUILT_IN_ISINFD64:
9017 case BUILT_IN_ISINFD128:
9019 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9020 if (ret)
9021 return ret;
9022 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9025 case BUILT_IN_ISNORMAL:
9026 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9028 case BUILT_IN_ISINF_SIGN:
9029 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9031 CASE_FLT_FN (BUILT_IN_ISNAN):
9032 case BUILT_IN_ISNAND32:
9033 case BUILT_IN_ISNAND64:
9034 case BUILT_IN_ISNAND128:
9035 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9037 case BUILT_IN_FREE:
9038 if (integer_zerop (arg0))
9039 return build_empty_stmt (loc);
9040 break;
9042 default:
9043 break;
9046 return NULL_TREE;
9050 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9051 This function returns NULL_TREE if no simplification was possible. */
9053 static tree
9054 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9056 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9057 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9059 if (TREE_CODE (arg0) == ERROR_MARK
9060 || TREE_CODE (arg1) == ERROR_MARK)
9061 return NULL_TREE;
9063 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9064 return ret;
9066 switch (fcode)
9068 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9069 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9070 if (validate_arg (arg0, REAL_TYPE)
9071 && validate_arg (arg1, POINTER_TYPE))
9072 return do_mpfr_lgamma_r (arg0, arg1, type);
9073 break;
9075 CASE_FLT_FN (BUILT_IN_FREXP):
9076 return fold_builtin_frexp (loc, arg0, arg1, type);
9078 CASE_FLT_FN (BUILT_IN_MODF):
9079 return fold_builtin_modf (loc, arg0, arg1, type);
9081 case BUILT_IN_STRSPN:
9082 return fold_builtin_strspn (loc, arg0, arg1);
9084 case BUILT_IN_STRCSPN:
9085 return fold_builtin_strcspn (loc, arg0, arg1);
9087 case BUILT_IN_STRPBRK:
9088 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9090 case BUILT_IN_EXPECT:
9091 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9093 case BUILT_IN_ISGREATER:
9094 return fold_builtin_unordered_cmp (loc, fndecl,
9095 arg0, arg1, UNLE_EXPR, LE_EXPR);
9096 case BUILT_IN_ISGREATEREQUAL:
9097 return fold_builtin_unordered_cmp (loc, fndecl,
9098 arg0, arg1, UNLT_EXPR, LT_EXPR);
9099 case BUILT_IN_ISLESS:
9100 return fold_builtin_unordered_cmp (loc, fndecl,
9101 arg0, arg1, UNGE_EXPR, GE_EXPR);
9102 case BUILT_IN_ISLESSEQUAL:
9103 return fold_builtin_unordered_cmp (loc, fndecl,
9104 arg0, arg1, UNGT_EXPR, GT_EXPR);
9105 case BUILT_IN_ISLESSGREATER:
9106 return fold_builtin_unordered_cmp (loc, fndecl,
9107 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9108 case BUILT_IN_ISUNORDERED:
9109 return fold_builtin_unordered_cmp (loc, fndecl,
9110 arg0, arg1, UNORDERED_EXPR,
9111 NOP_EXPR);
9113 /* We do the folding for va_start in the expander. */
9114 case BUILT_IN_VA_START:
9115 break;
9117 case BUILT_IN_OBJECT_SIZE:
9118 return fold_builtin_object_size (arg0, arg1);
9120 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9121 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9123 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9124 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9126 default:
9127 break;
9129 return NULL_TREE;
9132 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9133 and ARG2.
9134 This function returns NULL_TREE if no simplification was possible. */
9136 static tree
9137 fold_builtin_3 (location_t loc, tree fndecl,
9138 tree arg0, tree arg1, tree arg2)
9140 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9141 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9143 if (TREE_CODE (arg0) == ERROR_MARK
9144 || TREE_CODE (arg1) == ERROR_MARK
9145 || TREE_CODE (arg2) == ERROR_MARK)
9146 return NULL_TREE;
9148 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9149 arg0, arg1, arg2))
9150 return ret;
9152 switch (fcode)
9155 CASE_FLT_FN (BUILT_IN_SINCOS):
9156 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9158 CASE_FLT_FN (BUILT_IN_FMA):
9159 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9160 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9162 CASE_FLT_FN (BUILT_IN_REMQUO):
9163 if (validate_arg (arg0, REAL_TYPE)
9164 && validate_arg (arg1, REAL_TYPE)
9165 && validate_arg (arg2, POINTER_TYPE))
9166 return do_mpfr_remquo (arg0, arg1, arg2);
9167 break;
9169 case BUILT_IN_MEMCMP:
9170 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9172 case BUILT_IN_EXPECT:
9173 return fold_builtin_expect (loc, arg0, arg1, arg2);
9175 case BUILT_IN_ADD_OVERFLOW:
9176 case BUILT_IN_SUB_OVERFLOW:
9177 case BUILT_IN_MUL_OVERFLOW:
9178 case BUILT_IN_ADD_OVERFLOW_P:
9179 case BUILT_IN_SUB_OVERFLOW_P:
9180 case BUILT_IN_MUL_OVERFLOW_P:
9181 case BUILT_IN_SADD_OVERFLOW:
9182 case BUILT_IN_SADDL_OVERFLOW:
9183 case BUILT_IN_SADDLL_OVERFLOW:
9184 case BUILT_IN_SSUB_OVERFLOW:
9185 case BUILT_IN_SSUBL_OVERFLOW:
9186 case BUILT_IN_SSUBLL_OVERFLOW:
9187 case BUILT_IN_SMUL_OVERFLOW:
9188 case BUILT_IN_SMULL_OVERFLOW:
9189 case BUILT_IN_SMULLL_OVERFLOW:
9190 case BUILT_IN_UADD_OVERFLOW:
9191 case BUILT_IN_UADDL_OVERFLOW:
9192 case BUILT_IN_UADDLL_OVERFLOW:
9193 case BUILT_IN_USUB_OVERFLOW:
9194 case BUILT_IN_USUBL_OVERFLOW:
9195 case BUILT_IN_USUBLL_OVERFLOW:
9196 case BUILT_IN_UMUL_OVERFLOW:
9197 case BUILT_IN_UMULL_OVERFLOW:
9198 case BUILT_IN_UMULLL_OVERFLOW:
9199 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9201 default:
9202 break;
9204 return NULL_TREE;
9207 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9208 arguments. IGNORE is true if the result of the
9209 function call is ignored. This function returns NULL_TREE if no
9210 simplification was possible. */
9212 tree
9213 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9215 tree ret = NULL_TREE;
9217 switch (nargs)
9219 case 0:
9220 ret = fold_builtin_0 (loc, fndecl);
9221 break;
9222 case 1:
9223 ret = fold_builtin_1 (loc, fndecl, args[0]);
9224 break;
9225 case 2:
9226 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9227 break;
9228 case 3:
9229 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9230 break;
9231 default:
9232 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9233 break;
9235 if (ret)
9237 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9238 SET_EXPR_LOCATION (ret, loc);
9239 TREE_NO_WARNING (ret) = 1;
9240 return ret;
9242 return NULL_TREE;
9245 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9246 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9247 of arguments in ARGS to be omitted. OLDNARGS is the number of
9248 elements in ARGS. */
9250 static tree
9251 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9252 int skip, tree fndecl, int n, va_list newargs)
9254 int nargs = oldnargs - skip + n;
9255 tree *buffer;
9257 if (n > 0)
9259 int i, j;
9261 buffer = XALLOCAVEC (tree, nargs);
9262 for (i = 0; i < n; i++)
9263 buffer[i] = va_arg (newargs, tree);
9264 for (j = skip; j < oldnargs; j++, i++)
9265 buffer[i] = args[j];
9267 else
9268 buffer = args + skip;
9270 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9273 /* Return true if FNDECL shouldn't be folded right now.
9274    If a built-in function has an always_inline wrapper, defer
9275    folding it until after always_inline functions have been
9276    inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9277 might not be performed. */
9279 bool
9280 avoid_folding_inline_builtin (tree fndecl)
9282 return (DECL_DECLARED_INLINE_P (fndecl)
9283 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9284 && cfun
9285 && !cfun->always_inline_functions_inlined
9286 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
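
/* Motivating sketch (illustrative, not from this file): with
   -D_FORTIFY_SOURCE, glibc-style headers provide always_inline wrappers
   around the _chk variants, roughly like this.  A call to strcpy must be
   inlined through the wrapper, not folded as the plain builtin first, or
   the object-size check would be lost.  */
#if 0
extern inline __attribute__ ((always_inline, gnu_inline)) char *
strcpy (char *dest, const char *src)
{
  return __builtin___strcpy_chk (dest, src,
				 __builtin_object_size (dest, 1));
}
#endif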
9289 /* A wrapper function for builtin folding that prevents warnings for
9290 "statement without effect" and the like, caused by removing the
9291 call node earlier than the warning is generated. */
9293 tree
9294 fold_call_expr (location_t loc, tree exp, bool ignore)
9296 tree ret = NULL_TREE;
9297 tree fndecl = get_callee_fndecl (exp);
9298 if (fndecl
9299 && TREE_CODE (fndecl) == FUNCTION_DECL
9300 && DECL_BUILT_IN (fndecl)
9301 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9302 yet. Defer folding until we see all the arguments
9303 (after inlining). */
9304 && !CALL_EXPR_VA_ARG_PACK (exp))
9306 int nargs = call_expr_nargs (exp);
9308 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9309 instead last argument is __builtin_va_arg_pack (). Defer folding
9310 even in that case, until arguments are finalized. */
9311 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9313 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9314 if (fndecl2
9315 && TREE_CODE (fndecl2) == FUNCTION_DECL
9316 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9317 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9318 return NULL_TREE;
9321 if (avoid_folding_inline_builtin (fndecl))
9322 return NULL_TREE;
9324 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9325 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9326 CALL_EXPR_ARGP (exp), ignore);
9327 else
9329 tree *args = CALL_EXPR_ARGP (exp);
9330 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9331 if (ret)
9332 return ret;
9335 return NULL_TREE;
9338 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9339 N arguments are passed in the array ARGARRAY. Return a folded
9340 expression or NULL_TREE if no simplification was possible. */
9342 tree
9343 fold_builtin_call_array (location_t loc, tree,
9344 tree fn,
9345 int n,
9346 tree *argarray)
9348 if (TREE_CODE (fn) != ADDR_EXPR)
9349 return NULL_TREE;
9351 tree fndecl = TREE_OPERAND (fn, 0);
9352 if (TREE_CODE (fndecl) == FUNCTION_DECL
9353 && DECL_BUILT_IN (fndecl))
9355 /* If last argument is __builtin_va_arg_pack (), arguments to this
9356 function are not finalized yet. Defer folding until they are. */
9357 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9359 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9360 if (fndecl2
9361 && TREE_CODE (fndecl2) == FUNCTION_DECL
9362 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9363 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9364 return NULL_TREE;
9366 if (avoid_folding_inline_builtin (fndecl))
9367 return NULL_TREE;
9368 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9369 return targetm.fold_builtin (fndecl, n, argarray, false);
9370 else
9371 return fold_builtin_n (loc, fndecl, argarray, n, false);
9374 return NULL_TREE;
9377 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9378 along with N new arguments specified as the "..." parameters. SKIP
9379 is the number of arguments in EXP to be omitted. This function is used
9380 to do varargs-to-varargs transformations. */
9382 static tree
9383 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9385 va_list ap;
9386 tree t;
9388 va_start (ap, n);
9389 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9390 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9391 va_end (ap);
9393 return t;
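
/* Usage sketch (illustrative): skip the first SKIP arguments of EXP and
   prepend new ones, e.g. a hypothetical rewrite of a call f (a, b, c)
   into g (x, b, c):

     ret = rewrite_call_expr (loc, exp, 1, g_decl, 1, x);

   where g_decl and x are assumed names, not from this file.  */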
9396 /* Validate a single argument ARG against a tree code CODE representing
9397 a type. Return true when argument is valid. */
9399 static bool
9400 validate_arg (const_tree arg, enum tree_code code)
9402 if (!arg)
9403 return false;
9404 else if (code == POINTER_TYPE)
9405 return POINTER_TYPE_P (TREE_TYPE (arg));
9406 else if (code == INTEGER_TYPE)
9407 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9408 return code == TREE_CODE (TREE_TYPE (arg));
9411 /* This function validates the types of a function call argument list
9412 against a specified list of tree_codes. If the last specifier is a 0,
9413    that represents an ellipsis; otherwise the last specifier must be a
9414 VOID_TYPE.
9416 This is the GIMPLE version of validate_arglist. Eventually we want to
9417 completely convert builtins.c to work from GIMPLEs and the tree based
9418 validate_arglist will then be removed. */
9420 bool
9421 validate_gimple_arglist (const gcall *call, ...)
9423 enum tree_code code;
9424   bool res = false;
9425 va_list ap;
9426 const_tree arg;
9427 size_t i;
9429 va_start (ap, call);
9430 i = 0;
9432   do
9434       code = (enum tree_code) va_arg (ap, int);
9435 switch (code)
9437 case 0:
9438 	/* This signifies an ellipsis; any further arguments are all OK. */
9439 res = true;
9440 goto end;
9441 case VOID_TYPE:
9442 /* This signifies an endlink, if no arguments remain, return
9443 true, otherwise return false. */
9444 res = (i == gimple_call_num_args (call));
9445 goto end;
9446 default:
9447 /* If no parameters remain or the parameter's code does not
9448 match the specified code, return false. Otherwise continue
9449 checking any remaining arguments. */
9450 arg = gimple_call_arg (call, i++);
9451 if (!validate_arg (arg, code))
9452 goto end;
9453 break;
9456 while (1);
9458 /* We need gotos here since we can only have one VA_CLOSE in a
9459 function. */
9460 end: ;
9461 va_end (ap);
9463 return res;
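
/* Call sketch (illustrative): the code list is a sequence of tree codes
   terminated by VOID_TYPE (exact arity) or 0 (trailing ellipsis).  For a
   builtin declared as f (void *, int, ...):  */
#if 0
if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, 0))
  return NULL_TREE;
#endif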
9466 /* Default target-specific builtin expander that does nothing. */
9468 rtx
9469 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9470 rtx target ATTRIBUTE_UNUSED,
9471 rtx subtarget ATTRIBUTE_UNUSED,
9472 machine_mode mode ATTRIBUTE_UNUSED,
9473 int ignore ATTRIBUTE_UNUSED)
9475 return NULL_RTX;
9478 /* Returns true if EXP represents data that would potentially reside
9479 in a readonly section. */
9481 bool
9482 readonly_data_expr (tree exp)
9484 STRIP_NOPS (exp);
9486 if (TREE_CODE (exp) != ADDR_EXPR)
9487 return false;
9489 exp = get_base_address (TREE_OPERAND (exp, 0));
9490 if (!exp)
9491 return false;
9493 /* Make sure we call decl_readonly_section only for trees it
9494 can handle (since it returns true for everything it doesn't
9495 understand). */
9496 if (TREE_CODE (exp) == STRING_CST
9497 || TREE_CODE (exp) == CONSTRUCTOR
9498 || (VAR_P (exp) && TREE_STATIC (exp)))
9499 return decl_readonly_section (exp, 0);
9500 else
9501 return false;
9504 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9505 to the call, and TYPE is its return type.
9507 Return NULL_TREE if no simplification was possible, otherwise return the
9508 simplified form of the call as a tree.
9510 The simplified form may be a constant or other expression which
9511 computes the same value, but in a more efficient manner (including
9512 calls to other builtin functions).
9514 The call may contain arguments which need to be evaluated, but
9515 which are not useful to determine the result of the call. In
9516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9517 COMPOUND_EXPR will be an argument which must be evaluated.
9518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9519 COMPOUND_EXPR in the chain will contain the tree for the simplified
9520 form of the builtin function call. */
9522 static tree
9523 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9525 if (!validate_arg (s1, POINTER_TYPE)
9526 || !validate_arg (s2, POINTER_TYPE))
9527 return NULL_TREE;
9528 else
9530 tree fn;
9531 const char *p1, *p2;
9533 p2 = c_getstr (s2);
9534 if (p2 == NULL)
9535 return NULL_TREE;
9537 p1 = c_getstr (s1);
9538 if (p1 != NULL)
9540 const char *r = strpbrk (p1, p2);
9541 tree tem;
9543 if (r == NULL)
9544 return build_int_cst (TREE_TYPE (s1), 0);
9546 /* Return an offset into the constant string argument. */
9547 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9548 return fold_convert_loc (loc, type, tem);
9551 if (p2[0] == '\0')
9552 /* strpbrk(x, "") == NULL.
9553 Evaluate and ignore s1 in case it had side-effects. */
9554 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9556 if (p2[1] != '\0')
9557 return NULL_TREE; /* Really call strpbrk. */
9559 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9560 if (!fn)
9561 return NULL_TREE;
9563 /* New argument list transforming strpbrk(s1, s2) to
9564 strchr(s1, s2[0]). */
9565 return build_call_expr_loc (loc, fn, 2, s1,
9566 build_int_cst (integer_type_node, p2[0]));
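
/* Illustrative equivalences performed by the folder above (assuming the
   usual <string.h> declarations):  */
#if 0
strpbrk (s, "/");     /* becomes strchr (s, '/')                      */
strpbrk (s, "");      /* becomes ((void) s, (char *) 0)               */
strpbrk ("a/b", "/"); /* folds to a pointer into the string literal   */
#endif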
9570 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9571 to the call.
9573 Return NULL_TREE if no simplification was possible, otherwise return the
9574 simplified form of the call as a tree.
9576 The simplified form may be a constant or other expression which
9577 computes the same value, but in a more efficient manner (including
9578 calls to other builtin functions).
9580 The call may contain arguments which need to be evaluated, but
9581 which are not useful to determine the result of the call. In
9582 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9583 COMPOUND_EXPR will be an argument which must be evaluated.
9584 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9585 COMPOUND_EXPR in the chain will contain the tree for the simplified
9586 form of the builtin function call. */
9588 static tree
9589 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9591 if (!validate_arg (s1, POINTER_TYPE)
9592 || !validate_arg (s2, POINTER_TYPE))
9593 return NULL_TREE;
9594 else
9596 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9598       /* If either argument is "", the result is zero.  */
9599 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9600 /* Evaluate and ignore both arguments in case either one has
9601 side-effects. */
9602 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9603 s1, s2);
9604 return NULL_TREE;
9608 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9609 to the call.
9611 Return NULL_TREE if no simplification was possible, otherwise return the
9612 simplified form of the call as a tree.
9614 The simplified form may be a constant or other expression which
9615 computes the same value, but in a more efficient manner (including
9616 calls to other builtin functions).
9618 The call may contain arguments which need to be evaluated, but
9619 which are not useful to determine the result of the call. In
9620 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9621 COMPOUND_EXPR will be an argument which must be evaluated.
9622 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9623 COMPOUND_EXPR in the chain will contain the tree for the simplified
9624 form of the builtin function call. */
9626 static tree
9627 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9629 if (!validate_arg (s1, POINTER_TYPE)
9630 || !validate_arg (s2, POINTER_TYPE))
9631 return NULL_TREE;
9632 else
9634       /* If the first argument is "", the result is zero.  */
9635 const char *p1 = c_getstr (s1);
9636 if (p1 && *p1 == '\0')
9638 /* Evaluate and ignore argument s2 in case it has
9639 side-effects. */
9640 return omit_one_operand_loc (loc, size_type_node,
9641 size_zero_node, s2);
9644 /* If the second argument is "", return __builtin_strlen(s1). */
9645 const char *p2 = c_getstr (s2);
9646 if (p2 && *p2 == '\0')
9648 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9650 /* If the replacement _DECL isn't initialized, don't do the
9651 transformation. */
9652 if (!fn)
9653 return NULL_TREE;
9655 return build_call_expr_loc (loc, fn, 1, s1);
9657 return NULL_TREE;
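
/* Illustrative equivalences performed by the two folders above:  */
#if 0
strspn (s, "");       /* becomes ((void) s, (size_t) 0)   */
strspn ("", s);       /* likewise evaluates s, yields 0   */
strcspn (s, "");      /* becomes strlen (s)               */
strcspn ("", s);      /* becomes ((void) s, (size_t) 0)   */
#endif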
9661 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
9662    produced, false otherwise.  This is done so that we don't output the
9663    error or warning more than once. */
9665 bool
9666 fold_builtin_next_arg (tree exp, bool va_start_p)
9668 tree fntype = TREE_TYPE (current_function_decl);
9669 int nargs = call_expr_nargs (exp);
9670 tree arg;
9671   /* There is a good chance the current input_location points inside the
9672 definition of the va_start macro (perhaps on the token for
9673 builtin) in a system header, so warnings will not be emitted.
9674 Use the location in real source code. */
9675 source_location current_location =
9676 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9677 NULL);
9679 if (!stdarg_p (fntype))
9681 error ("%<va_start%> used in function with fixed args");
9682 return true;
9685 if (va_start_p)
9687 if (va_start_p && (nargs != 2))
9689 error ("wrong number of arguments to function %<va_start%>");
9690 return true;
9692 arg = CALL_EXPR_ARG (exp, 1);
9694 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9695      once we have checked the arguments and, if needed, issued a warning. */
9696 else
9698 if (nargs == 0)
9700 /* Evidently an out of date version of <stdarg.h>; can't validate
9701 va_start's second argument, but can still work as intended. */
9702 warning_at (current_location,
9703 OPT_Wvarargs,
9704 "%<__builtin_next_arg%> called without an argument");
9705 return true;
9707 else if (nargs > 1)
9709 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9710 return true;
9712 arg = CALL_EXPR_ARG (exp, 0);
9715 if (TREE_CODE (arg) == SSA_NAME)
9716 arg = SSA_NAME_VAR (arg);
9718 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9719 or __builtin_next_arg (0) the first time we see it, after checking
9720 the arguments and if needed issuing a warning. */
9721 if (!integer_zerop (arg))
9723 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9725 /* Strip off all nops for the sake of the comparison. This
9726 is not quite the same as STRIP_NOPS. It does more.
9727 We must also strip off INDIRECT_EXPR for C++ reference
9728 parameters. */
9729 while (CONVERT_EXPR_P (arg)
9730 || TREE_CODE (arg) == INDIRECT_REF)
9731 arg = TREE_OPERAND (arg, 0);
9732 if (arg != last_parm)
9734 	  /* FIXME: Sometimes with the tree optimizers we can get something
9735 	     other than the last argument even though the user used the last
9736 	     argument.  We just warn and set the arg to be the last
9737 	     argument so that we will not get wrong code because of
9738 	     it. */
9739 warning_at (current_location,
9740 OPT_Wvarargs,
9741 "second parameter of %<va_start%> not last named argument");
9744 /* Undefined by C99 7.15.1.4p4 (va_start):
9745 "If the parameter parmN is declared with the register storage
9746 class, with a function or array type, or with a type that is
9747 not compatible with the type that results after application of
9748 	 the default argument promotions, the behavior is undefined."
9749       */
9750 else if (DECL_REGISTER (arg))
9752 warning_at (current_location,
9753 OPT_Wvarargs,
9754 "undefined behavior when second parameter of "
9755 "%<va_start%> is declared with %<register%> storage");
9758 /* We want to verify the second parameter just once before the tree
9759 optimizers are run and then avoid keeping it in the tree,
9760 as otherwise we could warn even for correct code like:
9761 void foo (int i, ...)
9762 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9763 if (va_start_p)
9764 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9765 else
9766 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9768 return false;
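
/* Diagnostic sketch (illustrative): the cases checked above.  */
#if 0
#include <stdarg.h>

static void
ex_va (int a, int b, ...)
{
  va_list ap;
  va_start (ap, b);   /* OK: b is the last named parameter.  */
  va_end (ap);
  va_start (ap, a);   /* -Wvarargs: not the last named parameter.  */
  va_end (ap);
}
#endif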
9772 /* Expand a call EXP to __builtin_object_size. */
9774 static rtx
9775 expand_builtin_object_size (tree exp)
9777 tree ost;
9778 int object_size_type;
9779 tree fndecl = get_callee_fndecl (exp);
9781 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9783 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9784 exp, fndecl);
9785 expand_builtin_trap ();
9786 return const0_rtx;
9789 ost = CALL_EXPR_ARG (exp, 1);
9790 STRIP_NOPS (ost);
9792 if (TREE_CODE (ost) != INTEGER_CST
9793 || tree_int_cst_sgn (ost) < 0
9794 || compare_tree_int (ost, 3) > 0)
9796 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9797 exp, fndecl);
9798 expand_builtin_trap ();
9799 return const0_rtx;
9802 object_size_type = tree_to_shwi (ost);
9804 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9807 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9808 FCODE is the BUILT_IN_* to use.
9809 Return NULL_RTX if we failed; the caller should emit a normal call,
9810 otherwise try to get the result in TARGET, if convenient (and in
9811 mode MODE if that's convenient). */
9813 static rtx
9814 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9815 enum built_in_function fcode)
9817 if (!validate_arglist (exp,
9818 POINTER_TYPE,
9819 fcode == BUILT_IN_MEMSET_CHK
9820 ? INTEGER_TYPE : POINTER_TYPE,
9821 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9822 return NULL_RTX;
9824 tree dest = CALL_EXPR_ARG (exp, 0);
9825 tree src = CALL_EXPR_ARG (exp, 1);
9826 tree len = CALL_EXPR_ARG (exp, 2);
9827 tree size = CALL_EXPR_ARG (exp, 3);
9829 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9830 /*str=*/NULL_TREE, size);
9832 if (!tree_fits_uhwi_p (size))
9833 return NULL_RTX;
9835 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9837 /* Avoid transforming the checking call to an ordinary one when
9838 an overflow has been detected or when the call couldn't be
9839 validated because the size is not constant. */
9840 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9841 return NULL_RTX;
9843 tree fn = NULL_TREE;
9844 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9845 mem{cpy,pcpy,move,set} is available. */
9846 switch (fcode)
9848 case BUILT_IN_MEMCPY_CHK:
9849 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9850 break;
9851 case BUILT_IN_MEMPCPY_CHK:
9852 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9853 break;
9854 case BUILT_IN_MEMMOVE_CHK:
9855 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9856 break;
9857 case BUILT_IN_MEMSET_CHK:
9858 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9859 break;
9860 default:
9861 break;
9864 if (! fn)
9865 return NULL_RTX;
9867 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9868 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9869 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9870 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9872 else if (fcode == BUILT_IN_MEMSET_CHK)
9873 return NULL_RTX;
9874 else
9876 unsigned int dest_align = get_pointer_alignment (dest);
9878 /* If DEST is not a pointer type, call the normal function. */
9879 if (dest_align == 0)
9880 return NULL_RTX;
9882 /* If SRC and DEST are the same (and not volatile), do nothing. */
9883 if (operand_equal_p (src, dest, 0))
9885 tree expr;
9887 if (fcode != BUILT_IN_MEMPCPY_CHK)
9889 /* Evaluate and ignore LEN in case it has side-effects. */
9890 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9891 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9894 expr = fold_build_pointer_plus (dest, len);
9895 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9898 /* __memmove_chk special case. */
9899 if (fcode == BUILT_IN_MEMMOVE_CHK)
9901 unsigned int src_align = get_pointer_alignment (src);
9903 if (src_align == 0)
9904 return NULL_RTX;
9906 /* If src is categorized for a readonly section we can use
9907 normal __memcpy_chk. */
9908 if (readonly_data_expr (src))
9910 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9911 if (!fn)
9912 return NULL_RTX;
9913 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9914 dest, src, len, size);
9915 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9916 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9917 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9920 return NULL_RTX;
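
/* Transformation sketch (illustrative): when the size check can be
   decided at compile time, the _chk call expands as the plain call.  */
#if 0
#include <stddef.h>

static void
ex_chk (char *d, const char *s)
{
  /* Known object size >= length: expands as memcpy (d, s, 4).  */
  __builtin___memcpy_chk (d, s, 4, 16);
  /* Unknown object size ((size_t) -1): also expands unchecked.  */
  __builtin___memcpy_chk (d, s, 4, (size_t) -1);
}
#endif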
9924 /* Emit a warning if a buffer overflow is detected at compile time. */
9926 static void
9927 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9929 /* The source string. */
9930 tree srcstr = NULL_TREE;
9931 /* The size of the destination object. */
9932 tree objsize = NULL_TREE;
9933 /* The string that is being concatenated with (as in __strcat_chk)
9934 or null if it isn't. */
9935 tree catstr = NULL_TREE;
9936 /* The maximum length of the source sequence in a bounded operation
9937 (such as __strncat_chk) or null if the operation isn't bounded
9938 (such as __strcat_chk). */
9939 tree maxread = NULL_TREE;
9940 /* The exact size of the access (such as in __strncpy_chk). */
9941 tree size = NULL_TREE;
9943 switch (fcode)
9945 case BUILT_IN_STRCPY_CHK:
9946 case BUILT_IN_STPCPY_CHK:
9947 srcstr = CALL_EXPR_ARG (exp, 1);
9948 objsize = CALL_EXPR_ARG (exp, 2);
9949 break;
9951 case BUILT_IN_STRCAT_CHK:
9952 /* For __strcat_chk the warning will be emitted only if overflowing
9953 by at least strlen (dest) + 1 bytes. */
9954 catstr = CALL_EXPR_ARG (exp, 0);
9955 srcstr = CALL_EXPR_ARG (exp, 1);
9956 objsize = CALL_EXPR_ARG (exp, 2);
9957 break;
9959 case BUILT_IN_STRNCAT_CHK:
9960 catstr = CALL_EXPR_ARG (exp, 0);
9961 srcstr = CALL_EXPR_ARG (exp, 1);
9962 maxread = CALL_EXPR_ARG (exp, 2);
9963 objsize = CALL_EXPR_ARG (exp, 3);
9964 break;
9966 case BUILT_IN_STRNCPY_CHK:
9967 case BUILT_IN_STPNCPY_CHK:
9968 srcstr = CALL_EXPR_ARG (exp, 1);
9969 size = CALL_EXPR_ARG (exp, 2);
9970 objsize = CALL_EXPR_ARG (exp, 3);
9971 break;
9973 case BUILT_IN_SNPRINTF_CHK:
9974 case BUILT_IN_VSNPRINTF_CHK:
9975 maxread = CALL_EXPR_ARG (exp, 1);
9976 objsize = CALL_EXPR_ARG (exp, 3);
9977 break;
9978 default:
9979 gcc_unreachable ();
9982 if (catstr && maxread)
9984 /* Check __strncat_chk. There is no way to determine the length
9985 of the string to which the source string is being appended so
9986 just warn when the length of the source string is not known. */
9987 check_strncat_sizes (exp, objsize);
9988 return;
9991 /* The destination argument is the first one for all built-ins above. */
9992 tree dst = CALL_EXPR_ARG (exp, 0);
9994 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
9997 /* Emit a warning if a buffer overflow is detected at compile time
9998 in __sprintf_chk/__vsprintf_chk calls. */
10000 static void
10001 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10003 tree size, len, fmt;
10004 const char *fmt_str;
10005 int nargs = call_expr_nargs (exp);
10007 /* Verify the required arguments in the original call. */
10009 if (nargs < 4)
10010 return;
10011 size = CALL_EXPR_ARG (exp, 2);
10012 fmt = CALL_EXPR_ARG (exp, 3);
10014 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10015 return;
10017 /* Check whether the format is a literal string constant. */
10018 fmt_str = c_getstr (fmt);
10019 if (fmt_str == NULL)
10020 return;
10022 if (!init_target_chars ())
10023 return;
10025 /* If the format doesn't contain % args or %%, we know its size. */
10026 if (strchr (fmt_str, target_percent) == 0)
10027 len = build_int_cstu (size_type_node, strlen (fmt_str));
10028 /* If the format is "%s" and first ... argument is a string literal,
10029 we know it too. */
10030 else if (fcode == BUILT_IN_SPRINTF_CHK
10031 && strcmp (fmt_str, target_percent_s) == 0)
10033 tree arg;
10035 if (nargs < 5)
10036 return;
10037 arg = CALL_EXPR_ARG (exp, 4);
10038 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10039 return;
10041 len = c_strlen (arg, 1);
10042 if (!len || ! tree_fits_uhwi_p (len))
10043 return;
10045 else
10046 return;
10048 /* Add one for the terminating nul. */
10049 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10051 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10052 /*maxread=*/NULL_TREE, len, size);
10055 /* Emit a warning if free is called with the address of a variable. */
10057 static void
10058 maybe_emit_free_warning (tree exp)
10060 tree arg = CALL_EXPR_ARG (exp, 0);
10062 STRIP_NOPS (arg);
10063 if (TREE_CODE (arg) != ADDR_EXPR)
10064 return;
10066 arg = get_base_address (TREE_OPERAND (arg, 0));
10067 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10068 return;
10070 if (SSA_VAR_P (arg))
10071 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10072 "%Kattempt to free a non-heap object %qD", exp, arg);
10073 else
10074 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10075 "%Kattempt to free a non-heap object", exp);
10078 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10079 if possible. */
10081 static tree
10082 fold_builtin_object_size (tree ptr, tree ost)
10084 unsigned HOST_WIDE_INT bytes;
10085 int object_size_type;
10087 if (!validate_arg (ptr, POINTER_TYPE)
10088 || !validate_arg (ost, INTEGER_TYPE))
10089 return NULL_TREE;
10091 STRIP_NOPS (ost);
10093 if (TREE_CODE (ost) != INTEGER_CST
10094 || tree_int_cst_sgn (ost) < 0
10095 || compare_tree_int (ost, 3) > 0)
10096 return NULL_TREE;
10098 object_size_type = tree_to_shwi (ost);
10100 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10101 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10102 and (size_t) 0 for types 2 and 3. */
10103 if (TREE_SIDE_EFFECTS (ptr))
10104 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10106 if (TREE_CODE (ptr) == ADDR_EXPR)
10108 compute_builtin_object_size (ptr, object_size_type, &bytes);
10109 if (wi::fits_to_tree_p (bytes, size_type_node))
10110 return build_int_cstu (size_type_node, bytes);
10112 else if (TREE_CODE (ptr) == SSA_NAME)
10114 /* If object size is not known yet, delay folding until
10115 later. Maybe subsequent passes will help determining
10116 it. */
10117 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10118 && wi::fits_to_tree_p (bytes, size_type_node))
10119 return build_int_cstu (size_type_node, bytes);
10122 return NULL_TREE;
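
/* Behavior sketch (illustrative) for the folding above:  */
#if 0
#include <stddef.h>

static char buf[64];

static void
ex_object_size (char *p)
{
  size_t a = __builtin_object_size (buf, 0);      /* 64 */
  size_t b = __builtin_object_size (buf + 8, 0);  /* 56 */
  /* Pointer of unknown provenance: the maximum value for types 0 and 1,
     zero for types 2 and 3.  */
  size_t c = __builtin_object_size (p, 0);        /* (size_t) -1 */
  size_t d = __builtin_object_size (p, 2);        /* 0 */
  (void) a; (void) b; (void) c; (void) d;
}
#endif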
10125 /* Builtins with folding operations that operate on "..." arguments
10126 need special handling; we need to store the arguments in a convenient
10127 data structure before attempting any folding. Fortunately there are
10128    only a few builtins that fall into this category.  FNDECL is the
10129    function; ARGS and NARGS describe its arguments. */
10131 static tree
10132 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10134 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10135 tree ret = NULL_TREE;
10137 switch (fcode)
10139 case BUILT_IN_FPCLASSIFY:
10140 ret = fold_builtin_fpclassify (loc, args, nargs);
10141 break;
10143 default:
10144 break;
10146 if (ret)
10148 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10149 SET_EXPR_LOCATION (ret, loc);
10150 TREE_NO_WARNING (ret) = 1;
10151 return ret;
10153 return NULL_TREE;
10156 /* Initialize format string characters in the target charset. */
10158 bool
10159 init_target_chars (void)
10161 static bool init;
10162 if (!init)
10164 target_newline = lang_hooks.to_target_charset ('\n');
10165 target_percent = lang_hooks.to_target_charset ('%');
10166 target_c = lang_hooks.to_target_charset ('c');
10167 target_s = lang_hooks.to_target_charset ('s');
10168 if (target_newline == 0 || target_percent == 0 || target_c == 0
10169 || target_s == 0)
10170 return false;
10172 target_percent_c[0] = target_percent;
10173 target_percent_c[1] = target_c;
10174 target_percent_c[2] = '\0';
10176 target_percent_s[0] = target_percent;
10177 target_percent_s[1] = target_s;
10178 target_percent_s[2] = '\0';
10180 target_percent_s_newline[0] = target_percent;
10181 target_percent_s_newline[1] = target_s;
10182 target_percent_s_newline[2] = target_newline;
10183 target_percent_s_newline[3] = '\0';
10185 init = true;
10187 return true;
10190 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10191 and no overflow/underflow occurred. INEXACT is true if M was not
10192 exactly calculated. TYPE is the tree type for the result. This
10193    function assumes that you cleared the MPFR flags before
10194    calculating M, so that any flag set since then was set by that
10195    calculation.  Return NULL_TREE if any checks fail. */
10197 static tree
10198 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10200 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10201 overflow/underflow occurred. If -frounding-math, proceed iff the
10202 result of calling FUNC was exact. */
10203 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10204 && (!flag_rounding_math || !inexact))
10206 REAL_VALUE_TYPE rr;
10208 real_from_mpfr (&rr, m, type, GMP_RNDN);
10209 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10210 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10211 	 but the mpfr_t is not, then we underflowed in the
10212 conversion. */
10213 if (real_isfinite (&rr)
10214 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10216 REAL_VALUE_TYPE rmode;
10218 real_convert (&rmode, TYPE_MODE (type), &rr);
10219 /* Proceed iff the specified mode can hold the value. */
10220 if (real_identical (&rmode, &rr))
10221 return build_real (type, rmode);
10224 return NULL_TREE;
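
/* Call-pattern sketch (illustrative fragment): as used by the do_mpfr_*
   helpers below, with m, rnd, type and inexact as in those callers.  */
#if 0
mpfr_clear_flags ();
inexact = mpfr_sin (m, m, rnd);            /* any MPFR computation */
result = do_mpfr_ckconv (m, type, inexact);
#endif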
10227 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10228 number and no overflow/underflow occurred. INEXACT is true if M
10229 was not exactly calculated. TYPE is the tree type for the result.
10230    This function assumes that you cleared the MPFR flags before
10231    calculating M, so that any flag set since then was set by that
10232    calculation.  Return NULL_TREE if any checks fail; if
10233    FORCE_CONVERT is true, the checks are bypassed. */
10235 static tree
10236 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10238 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10239 overflow/underflow occurred. If -frounding-math, proceed iff the
10240 result of calling FUNC was exact. */
10241 if (force_convert
10242 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10243 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10244 && (!flag_rounding_math || !inexact)))
10246 REAL_VALUE_TYPE re, im;
10248 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10249 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10250 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10251 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10252 	 but the mpfr_t is not, then we underflowed in the
10253 conversion. */
10254 if (force_convert
10255 || (real_isfinite (&re) && real_isfinite (&im)
10256 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10257 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10259 REAL_VALUE_TYPE re_mode, im_mode;
10261 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10262 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10263 /* Proceed iff the specified mode can hold the value. */
10264 if (force_convert
10265 || (real_identical (&re_mode, &re)
10266 && real_identical (&im_mode, &im)))
10267 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10268 build_real (TREE_TYPE (type), im_mode));
10271 return NULL_TREE;
10274 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10275    the int pointed to by ARG_QUO and return the result.  The type is taken
10276 from the type of ARG0 and is used for setting the precision of the
10277 calculation and results. */
10279 static tree
10280 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10282 tree const type = TREE_TYPE (arg0);
10283 tree result = NULL_TREE;
10285 STRIP_NOPS (arg0);
10286 STRIP_NOPS (arg1);
10288 /* To proceed, MPFR must exactly represent the target floating point
10289 format, which only happens when the target base equals two. */
10290 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10291 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10292 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10294 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10295 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10297 if (real_isfinite (ra0) && real_isfinite (ra1))
10299 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10300 const int prec = fmt->p;
10301 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10302 tree result_rem;
10303 long integer_quo;
10304 mpfr_t m0, m1;
10306 mpfr_inits2 (prec, m0, m1, NULL);
10307 mpfr_from_real (m0, ra0, GMP_RNDN);
10308 mpfr_from_real (m1, ra1, GMP_RNDN);
10309 mpfr_clear_flags ();
10310 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10311 /* Remquo is independent of the rounding mode, so pass
10312 inexact=0 to do_mpfr_ckconv(). */
10313 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10314 mpfr_clears (m0, m1, NULL);
10315 if (result_rem)
10317 /* MPFR calculates quo in the host's long so it may
10318 return more bits in quo than the target int can hold
10319 if sizeof(host long) > sizeof(target int). This can
10320 happen even for native compilers in LP64 mode. In
10321 	     these cases, reduce the quo value modulo the largest
10322 	     number that the target int can hold, leaving one
10323 	     bit for the sign.  */
10324 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10325 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10327 /* Dereference the quo pointer argument. */
10328 arg_quo = build_fold_indirect_ref (arg_quo);
10329 /* Proceed iff a valid pointer type was passed in. */
10330 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10332 /* Set the value. */
10333 tree result_quo
10334 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10335 build_int_cst (TREE_TYPE (arg_quo),
10336 integer_quo));
10337 TREE_SIDE_EFFECTS (result_quo) = 1;
10338 /* Combine the quo assignment with the rem. */
10339 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10340 result_quo, result_rem));
10345 return result;
10348 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10349 resulting value as a tree with type TYPE. The mpfr precision is
10350 set to the precision of TYPE. We assume that this mpfr function
10351 returns zero if the result could be calculated exactly within the
10352 requested precision. In addition, the integer pointer represented
10353 by ARG_SG will be dereferenced and set to the appropriate signgam
10354 (-1,1) value. */
10356 static tree
10357 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10359 tree result = NULL_TREE;
10361 STRIP_NOPS (arg);
10363 /* To proceed, MPFR must exactly represent the target floating point
10364 format, which only happens when the target base equals two. Also
10365 verify ARG is a constant and that ARG_SG is an int pointer. */
10366 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10367 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10368 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10369 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10371 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10373 /* In addition to NaN and Inf, the argument cannot be zero or a
10374 negative integer. */
10375 if (real_isfinite (ra)
10376 && ra->cl != rvc_zero
10377 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10379 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10380 const int prec = fmt->p;
10381 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10382 int inexact, sg;
10383 mpfr_t m;
10384 tree result_lg;
10386 mpfr_init2 (m, prec);
10387 mpfr_from_real (m, ra, GMP_RNDN);
10388 mpfr_clear_flags ();
10389 inexact = mpfr_lgamma (m, &sg, m, rnd);
10390 result_lg = do_mpfr_ckconv (m, type, inexact);
10391 mpfr_clear (m);
10392 if (result_lg)
10394 tree result_sg;
10396 /* Dereference the arg_sg pointer argument. */
10397 arg_sg = build_fold_indirect_ref (arg_sg);
10398 /* Assign the signgam value into *arg_sg. */
10399 result_sg = fold_build2 (MODIFY_EXPR,
10400 TREE_TYPE (arg_sg), arg_sg,
10401 build_int_cst (TREE_TYPE (arg_sg), sg));
10402 TREE_SIDE_EFFECTS (result_sg) = 1;
10403 /* Combine the signgam assignment with the lgamma result. */
10404 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10405 result_sg, result_lg));
10410 return result;
10413 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10414 mpc function FUNC on it and return the resulting value as a tree
10415 with type TYPE. The mpfr precision is set to the precision of
10416 TYPE. We assume that function FUNC returns zero if the result
10417 could be calculated exactly within the requested precision. If
10418 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10419 in the arguments and/or results. */
10421 tree
10422 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10423 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10425 tree result = NULL_TREE;
10427 STRIP_NOPS (arg0);
10428 STRIP_NOPS (arg1);
10430 /* To proceed, MPFR must exactly represent the target floating point
10431 format, which only happens when the target base equals two. */
10432 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10433 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10434 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10435 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10436 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10438 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10439 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10440 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10441 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10443 if (do_nonfinite
10444 || (real_isfinite (re0) && real_isfinite (im0)
10445 && real_isfinite (re1) && real_isfinite (im1)))
10447 const struct real_format *const fmt =
10448 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10449 const int prec = fmt->p;
10450 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10451 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10452 int inexact;
10453 mpc_t m0, m1;
10455 mpc_init2 (m0, prec);
10456 mpc_init2 (m1, prec);
10457 mpfr_from_real (mpc_realref (m0), re0, rnd);
10458 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10459 mpfr_from_real (mpc_realref (m1), re1, rnd);
10460 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10461 mpfr_clear_flags ();
10462 inexact = func (m0, m0, m1, crnd);
10463 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10464 mpc_clear (m0);
10465 mpc_clear (m1);
10469 return result;
10472 /* A wrapper function for builtin folding that prevents warnings for
10473 "statement without effect" and the like, caused by removing the
10474 call node earlier than the warning is generated. */
10476 tree
10477 fold_call_stmt (gcall *stmt, bool ignore)
10479 tree ret = NULL_TREE;
10480 tree fndecl = gimple_call_fndecl (stmt);
10481 location_t loc = gimple_location (stmt);
10482 if (fndecl
10483 && TREE_CODE (fndecl) == FUNCTION_DECL
10484 && DECL_BUILT_IN (fndecl)
10485 && !gimple_call_va_arg_pack_p (stmt))
10487 int nargs = gimple_call_num_args (stmt);
10488 tree *args = (nargs > 0
10489 ? gimple_call_arg_ptr (stmt, 0)
10490 : &error_mark_node);
10492 if (avoid_folding_inline_builtin (fndecl))
10493 return NULL_TREE;
10494 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10496 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10498 else
10500 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10501 if (ret)
10503 /* Propagate location information from original call to
10504 expansion of builtin. Otherwise things like
10505 maybe_emit_chk_warning, that operate on the expansion
10506 of a builtin, will use the wrong location information. */
10507 if (gimple_has_location (stmt))
10509 tree realret = ret;
10510 if (TREE_CODE (ret) == NOP_EXPR)
10511 realret = TREE_OPERAND (ret, 0);
10512 if (CAN_HAVE_LOCATION_P (realret)
10513 && !EXPR_HAS_LOCATION (realret))
10514 SET_EXPR_LOCATION (realret, loc);
10515 return realret;
10517 return ret;
10521 return NULL_TREE;
10524 /* Look up the function in builtin_decl that corresponds to DECL
10525 and set ASMSPEC as its user assembler name. DECL must be a
10526 function decl that declares a builtin. */
10528 void
10529 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10531 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10532 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10533 && asmspec != 0);
10535 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10536 set_user_assembler_name (builtin, asmspec);
10538 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10539 && INT_TYPE_SIZE < BITS_PER_WORD)
10541 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10542 set_user_assembler_libfunc ("ffs", asmspec);
10543 set_optab_libfunc (ffs_optab, mode, "ffs");
10547 /* Return true if DECL is a builtin that expands to a constant or similarly
10548 simple code. */
10549 bool
10550 is_simple_builtin (tree decl)
10552 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10553 switch (DECL_FUNCTION_CODE (decl))
10555 /* Builtins that expand to constants. */
10556 case BUILT_IN_CONSTANT_P:
10557 case BUILT_IN_EXPECT:
10558 case BUILT_IN_OBJECT_SIZE:
10559 case BUILT_IN_UNREACHABLE:
10560 /* Simple register moves or loads from stack. */
10561 case BUILT_IN_ASSUME_ALIGNED:
10562 case BUILT_IN_RETURN_ADDRESS:
10563 case BUILT_IN_EXTRACT_RETURN_ADDR:
10564 case BUILT_IN_FROB_RETURN_ADDR:
10565 case BUILT_IN_RETURN:
10566 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10567 case BUILT_IN_FRAME_ADDRESS:
10568 case BUILT_IN_VA_END:
10569 case BUILT_IN_STACK_SAVE:
10570 case BUILT_IN_STACK_RESTORE:
10571 /* Exception state returns or moves registers around. */
10572 case BUILT_IN_EH_FILTER:
10573 case BUILT_IN_EH_POINTER:
10574 case BUILT_IN_EH_COPY_VALUES:
10575 return true;
10577 default:
10578 return false;
10581 return false;
10584 /* Return true if DECL is a builtin that is not expensive, i.e. one that is
10585 most probably expanded inline into reasonably simple code. This is a
10586 superset of is_simple_builtin. */
10587 bool
10588 is_inexpensive_builtin (tree decl)
10590 if (!decl)
10591 return false;
10592 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10593 return true;
10594 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10595 switch (DECL_FUNCTION_CODE (decl))
10597 case BUILT_IN_ABS:
10598 CASE_BUILT_IN_ALLOCA:
10599 case BUILT_IN_BSWAP16:
10600 case BUILT_IN_BSWAP32:
10601 case BUILT_IN_BSWAP64:
10602 case BUILT_IN_CLZ:
10603 case BUILT_IN_CLZIMAX:
10604 case BUILT_IN_CLZL:
10605 case BUILT_IN_CLZLL:
10606 case BUILT_IN_CTZ:
10607 case BUILT_IN_CTZIMAX:
10608 case BUILT_IN_CTZL:
10609 case BUILT_IN_CTZLL:
10610 case BUILT_IN_FFS:
10611 case BUILT_IN_FFSIMAX:
10612 case BUILT_IN_FFSL:
10613 case BUILT_IN_FFSLL:
10614 case BUILT_IN_IMAXABS:
10615 case BUILT_IN_FINITE:
10616 case BUILT_IN_FINITEF:
10617 case BUILT_IN_FINITEL:
10618 case BUILT_IN_FINITED32:
10619 case BUILT_IN_FINITED64:
10620 case BUILT_IN_FINITED128:
10621 case BUILT_IN_FPCLASSIFY:
10622 case BUILT_IN_ISFINITE:
10623 case BUILT_IN_ISINF_SIGN:
10624 case BUILT_IN_ISINF:
10625 case BUILT_IN_ISINFF:
10626 case BUILT_IN_ISINFL:
10627 case BUILT_IN_ISINFD32:
10628 case BUILT_IN_ISINFD64:
10629 case BUILT_IN_ISINFD128:
10630 case BUILT_IN_ISNAN:
10631 case BUILT_IN_ISNANF:
10632 case BUILT_IN_ISNANL:
10633 case BUILT_IN_ISNAND32:
10634 case BUILT_IN_ISNAND64:
10635 case BUILT_IN_ISNAND128:
10636 case BUILT_IN_ISNORMAL:
10637 case BUILT_IN_ISGREATER:
10638 case BUILT_IN_ISGREATEREQUAL:
10639 case BUILT_IN_ISLESS:
10640 case BUILT_IN_ISLESSEQUAL:
10641 case BUILT_IN_ISLESSGREATER:
10642 case BUILT_IN_ISUNORDERED:
10643 case BUILT_IN_VA_ARG_PACK:
10644 case BUILT_IN_VA_ARG_PACK_LEN:
10645 case BUILT_IN_VA_COPY:
10646 case BUILT_IN_TRAP:
10647 case BUILT_IN_SAVEREGS:
10648 case BUILT_IN_POPCOUNTL:
10649 case BUILT_IN_POPCOUNTLL:
10650 case BUILT_IN_POPCOUNTIMAX:
10651 case BUILT_IN_POPCOUNT:
10652 case BUILT_IN_PARITYL:
10653 case BUILT_IN_PARITYLL:
10654 case BUILT_IN_PARITYIMAX:
10655 case BUILT_IN_PARITY:
10656 case BUILT_IN_LABS:
10657 case BUILT_IN_LLABS:
10658 case BUILT_IN_PREFETCH:
10659 case BUILT_IN_ACC_ON_DEVICE:
10660 return true;
10662 default:
10663 return is_simple_builtin (decl);
10666 return false;
10669 /* Return true if T is a constant and the value cast to a target char
10670 can be represented by a host char.
10671    Store the resulting char constant in *P if so. */
10673 bool
10674 target_char_cst_p (tree t, char *p)
10676 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10677 return false;
10679 *p = (char)tree_to_uhwi (t);
10680 return true;
10683 /* Return the maximum object size. */
10685 tree
10686 max_object_size (void)
10688 /* To do: Make this a configurable parameter. */
10689 return TYPE_MAX_VALUE (ptrdiff_type_node);