1 /* Expand builtin functions.
2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "realmpfr.h"
52 #include "cfgrtl.h"
53 #include "except.h"
54 #include "dojump.h"
55 #include "explow.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "libfuncs.h"
59 #include "output.h"
60 #include "typeclass.h"
61 #include "langhooks.h"
62 #include "value-prof.h"
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "tree-chkp.h"
68 #include "rtl-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
74 struct target_builtins default_target_builtins;
75 #if SWITCHABLE_TARGET
76 struct target_builtins *this_target_builtins = &default_target_builtins;
77 #endif
79 /* Define the names of the builtin function types and codes. */
80 const char *const built_in_class_names[BUILT_IN_LAST]
81 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
84 const char * built_in_names[(int) END_BUILTINS] =
86 #include "builtins.def"
89 /* Set up an array of builtin_info_type, making sure each element decl is
90 initialized to NULL_TREE. */
91 builtin_info_type builtin_info[(int)END_BUILTINS];
93 /* Non-zero if __builtin_constant_p should be folded right away. */
94 bool force_folding_builtin_constant_p;
96 static rtx c_readstr (const char *, scalar_int_mode);
97 static int target_char_cast (tree, char *);
98 static rtx get_memory_rtx (tree, tree);
99 static int apply_args_size (void);
100 static int apply_result_size (void);
101 static rtx result_vector (int, rtx);
102 static void expand_builtin_prefetch (tree);
103 static rtx expand_builtin_apply_args (void);
104 static rtx expand_builtin_apply_args_1 (void);
105 static rtx expand_builtin_apply (rtx, rtx, rtx);
106 static void expand_builtin_return (rtx);
107 static enum type_class type_to_class (tree);
108 static rtx expand_builtin_classify_type (tree);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
123 static rtx expand_builtin_memchr (tree, rtx);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
126 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
127 rtx target, tree exp, int endp);
128 static rtx expand_builtin_memmove (tree, rtx);
129 static rtx expand_builtin_mempcpy (tree, rtx);
130 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
132 static rtx expand_builtin_strcat (tree, rtx);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_stpncpy (tree, rtx);
137 static rtx expand_builtin_strncat (tree, rtx);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
155 static bool validate_arg (const_tree, enum tree_code code);
156 static rtx expand_builtin_fabs (tree, rtx, rtx);
157 static rtx expand_builtin_signbit (tree, rtx);
158 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
159 static tree fold_builtin_isascii (location_t, tree);
160 static tree fold_builtin_toascii (location_t, tree);
161 static tree fold_builtin_isdigit (location_t, tree);
162 static tree fold_builtin_fabs (location_t, tree, tree);
163 static tree fold_builtin_abs (location_t, tree, tree);
164 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
165 enum tree_code);
166 static tree fold_builtin_0 (location_t, tree);
167 static tree fold_builtin_1 (location_t, tree, tree);
168 static tree fold_builtin_2 (location_t, tree, tree, tree);
169 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree);
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
195 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
197 static bool
198 is_builtin_name (const char *name)
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 return false;
210 /* Return true if DECL is a function symbol representing a built-in. */
212 bool
213 is_builtin_fn (tree decl)
215 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218 /* Return true if NODE should be considered for inline expansion regardless
219 of the optimization level. This means whenever a function is invoked with
220 its "internal" name, which normally contains the prefix "__builtin". */
222 bool
223 called_as_built_in (tree node)
225 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
226 we want the name used to call the function, not the name it
227 will have. */
228 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
229 return is_builtin_name (name);
232 /* Compute values M and N such that M divides (address of EXP - N) and such
233 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
234 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in
235 *ALIGNP and any bit-offset in *BITPOSP.
237 Note that the address (and thus the alignment) computed here is based
238 on the address to which a symbol resolves, whereas DECL_ALIGN is based
239 on the address at which an object is actually located. These two
240 addresses are not always the same. For example, on ARM targets,
241 the address &foo of a Thumb function foo() has the lowest bit set,
242 whereas foo() itself starts on an even address.
244 If ADDR_P is true we are taking the address of the memory reference EXP
245 and thus cannot rely on the access taking place. */
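/* As a concrete illustration, assuming BITS_PER_UNIT == 8: if the address
   of EXP is known to be of the form 16 * k + 4 bytes for some unknown k,
   this stores M = 128 bits in *ALIGNP and N = 32 bits in *BITPOSP, i.e.
   the access is 4 bytes past a 16-byte-aligned boundary.  */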
247 static bool
248 get_object_alignment_2 (tree exp, unsigned int *alignp,
249 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
251 poly_int64 bitsize, bitpos;
252 tree offset;
253 machine_mode mode;
254 int unsignedp, reversep, volatilep;
255 unsigned int align = BITS_PER_UNIT;
256 bool known_alignment = false;
258 /* Get the innermost object and the constant (bitpos) and possibly
259 variable (offset) offset of the access. */
260 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
261 &unsignedp, &reversep, &volatilep);
263 /* Extract alignment information from the innermost object and
264 possibly adjust bitpos and offset. */
265 if (TREE_CODE (exp) == FUNCTION_DECL)
267 /* Function addresses can encode extra information besides their
268 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
269 allows the low bit to be used as a virtual bit, we know
270 that the address itself must be at least 2-byte aligned. */
271 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
272 align = 2 * BITS_PER_UNIT;
274 else if (TREE_CODE (exp) == LABEL_DECL)
275 ;
276 else if (TREE_CODE (exp) == CONST_DECL)
278 /* The alignment of a CONST_DECL is determined by its initializer. */
279 exp = DECL_INITIAL (exp);
280 align = TYPE_ALIGN (TREE_TYPE (exp));
281 if (CONSTANT_CLASS_P (exp))
282 align = targetm.constant_alignment (exp, align);
284 known_alignment = true;
286 else if (DECL_P (exp))
288 align = DECL_ALIGN (exp);
289 known_alignment = true;
291 else if (TREE_CODE (exp) == INDIRECT_REF
292 || TREE_CODE (exp) == MEM_REF
293 || TREE_CODE (exp) == TARGET_MEM_REF)
295 tree addr = TREE_OPERAND (exp, 0);
296 unsigned ptr_align;
297 unsigned HOST_WIDE_INT ptr_bitpos;
298 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
300 /* If the address is explicitly aligned, handle that. */
301 if (TREE_CODE (addr) == BIT_AND_EXPR
302 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
304 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
305 ptr_bitmask *= BITS_PER_UNIT;
306 align = least_bit_hwi (ptr_bitmask);
307 addr = TREE_OPERAND (addr, 0);
310 known_alignment
311 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
312 align = MAX (ptr_align, align);
314 /* Re-apply explicit alignment to the bitpos. */
315 ptr_bitpos &= ptr_bitmask;
317 /* The alignment of the pointer operand in a TARGET_MEM_REF
318 has to take the variable offset parts into account. */
319 if (TREE_CODE (exp) == TARGET_MEM_REF)
321 if (TMR_INDEX (exp))
323 unsigned HOST_WIDE_INT step = 1;
324 if (TMR_STEP (exp))
325 step = TREE_INT_CST_LOW (TMR_STEP (exp));
326 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
328 if (TMR_INDEX2 (exp))
329 align = BITS_PER_UNIT;
330 known_alignment = false;
333 /* When EXP is an actual memory reference then we can use
334 TYPE_ALIGN of a pointer indirection to derive alignment.
335 Do so only if get_pointer_alignment_1 did not reveal absolute
336 alignment knowledge and if using that alignment would
337 improve the situation. */
338 unsigned int talign;
339 if (!addr_p && !known_alignment
340 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
341 && talign > align)
342 align = talign;
343 else
345 /* Else adjust bitpos accordingly. */
346 bitpos += ptr_bitpos;
347 if (TREE_CODE (exp) == MEM_REF
348 || TREE_CODE (exp) == TARGET_MEM_REF)
349 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
352 else if (TREE_CODE (exp) == STRING_CST)
354 /* A STRING_CST is the only kind of constant object we allow not to be
355 wrapped inside a CONST_DECL. */
356 align = TYPE_ALIGN (TREE_TYPE (exp));
357 if (CONSTANT_CLASS_P (exp))
358 align = targetm.constant_alignment (exp, align);
360 known_alignment = true;
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 if (offset)
367 unsigned int trailing_zeros = tree_ctz (offset);
368 if (trailing_zeros < HOST_BITS_PER_INT)
370 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
371 if (inner)
372 align = MIN (align, inner);
376 /* Account for the alignment of runtime coefficients, so that the constant
377 bitpos is guaranteed to be accurate. */
378 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
379 if (alt_align != 0 && alt_align < align)
381 align = alt_align;
382 known_alignment = false;
385 *alignp = align;
386 *bitposp = bitpos.coeffs[0] & (align - 1);
387 return known_alignment;
390 /* For a memory reference expression EXP compute values M and N such that M
391 divides (&EXP - N) and such that N < M. If these numbers can be determined,
392 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
393 and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */
395 bool
396 get_object_alignment_1 (tree exp, unsigned int *alignp,
397 unsigned HOST_WIDE_INT *bitposp)
399 return get_object_alignment_2 (exp, alignp, bitposp, false);
402 /* Return the alignment in bits of EXP, an object. */
404 unsigned int
405 get_object_alignment (tree exp)
407 unsigned HOST_WIDE_INT bitpos = 0;
408 unsigned int align;
410 get_object_alignment_1 (exp, &align, &bitpos);
412 /* align and bitpos now specify known low bits of the pointer.
413 ptr & (align - 1) == bitpos. */
415 if (bitpos != 0)
416 align = least_bit_hwi (bitpos);
417 return align;
420 /* For a pointer valued expression EXP compute values M and N such that M
421 divides (EXP - N) and such that N < M. If these numbers can be determined,
422 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
423 the results are just a conservative approximation.
425 If EXP is not a pointer, false is returned too. */
427 bool
428 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
429 unsigned HOST_WIDE_INT *bitposp)
431 STRIP_NOPS (exp);
433 if (TREE_CODE (exp) == ADDR_EXPR)
434 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
435 alignp, bitposp, true);
436 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
438 unsigned int align;
439 unsigned HOST_WIDE_INT bitpos;
440 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
441 &align, &bitpos);
442 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
443 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
444 else
446 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
447 if (trailing_zeros < HOST_BITS_PER_INT)
449 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
450 if (inner)
451 align = MIN (align, inner);
454 *alignp = align;
455 *bitposp = bitpos & (align - 1);
456 return res;
458 else if (TREE_CODE (exp) == SSA_NAME
459 && POINTER_TYPE_P (TREE_TYPE (exp)))
461 unsigned int ptr_align, ptr_misalign;
462 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
464 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
466 *bitposp = ptr_misalign * BITS_PER_UNIT;
467 *alignp = ptr_align * BITS_PER_UNIT;
468 /* Make sure to return a sensible alignment when the multiplication
469 by BITS_PER_UNIT overflowed. */
470 if (*alignp == 0)
471 *alignp = 1u << (HOST_BITS_PER_INT - 1);
472 /* We cannot really tell whether this result is an approximation. */
473 return false;
475 else
477 *bitposp = 0;
478 *alignp = BITS_PER_UNIT;
479 return false;
482 else if (TREE_CODE (exp) == INTEGER_CST)
484 *alignp = BIGGEST_ALIGNMENT;
485 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
486 & (BIGGEST_ALIGNMENT - 1));
487 return true;
490 *bitposp = 0;
491 *alignp = BITS_PER_UNIT;
492 return false;
495 /* Return the alignment in bits of EXP, a pointer valued expression.
496 The alignment returned is, by default, the alignment of the thing that
497 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
499 Otherwise, look at the expression to see if we can do better, i.e., if the
500 expression is actually pointing at an object whose alignment is tighter. */
502 unsigned int
503 get_pointer_alignment (tree exp)
505 unsigned HOST_WIDE_INT bitpos = 0;
506 unsigned int align;
508 get_pointer_alignment_1 (exp, &align, &bitpos);
510 /* align and bitpos now specify known low bits of the pointer.
511 ptr & (align - 1) == bitpos. */
513 if (bitpos != 0)
514 align = least_bit_hwi (bitpos);
516 return align;
519 /* Return the number of leading non-zero elements in the sequence
520 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
521 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
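/* For instance, assuming a target with a 4-byte wide character, scanning
   the byte representation of L"ab\0c" with ELTSIZE == 4 and MAXELTS == 4
   returns 2, because the third 4-byte element compares equal to
   "\0\0\0\0".  */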
523 static unsigned
524 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
526 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
528 unsigned n;
530 if (eltsize == 1)
532 /* Optimize the common case of plain char. */
533 for (n = 0; n < maxelts; n++)
535 const char *elt = (const char*) ptr + n;
536 if (!*elt)
537 break;
540 else
542 for (n = 0; n < maxelts; n++)
544 const char *elt = (const char*) ptr + n * eltsize;
545 if (!memcmp (elt, "\0\0\0\0", eltsize))
546 break;
549 return n;
552 /* Compute the length of a null-terminated character string or wide
553 character string handling character sizes of 1, 2, and 4 bytes.
554 TREE_STRING_LENGTH is not the right way because it evaluates to
555 the size of the character array in bytes (as opposed to characters)
556 and because the array can contain a zero byte in the middle.
558 ONLY_VALUE should be nonzero if the result is not going to be emitted
559 into the instruction stream and zero if it is going to be expanded.
560 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
561 is returned, otherwise NULL, since
562 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
563 evaluate the side-effects.
565 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
566 accesses. Note that this implies the result is not going to be emitted
567 into the instruction stream.
569 The value returned is of type `ssizetype'.
571 Unfortunately, string_constant can't access the values of const char
572 arrays with initializers, so neither can we do so here. */
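/* For instance, given the string constant "hello" and a constant byte
   offset of 0 this folds to ssize_int (5), and with a byte offset of 2 it
   folds to 3.  For "foo\0bar" with a non-constant offset NULL_TREE is
   returned, since the distance to the terminating null cannot be known
   without knowing where the search starts.  */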
574 tree
575 c_strlen (tree src, int only_value)
577 STRIP_NOPS (src);
578 if (TREE_CODE (src) == COND_EXPR
579 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
581 tree len1, len2;
583 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
584 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
585 if (tree_int_cst_equal (len1, len2))
586 return len1;
589 if (TREE_CODE (src) == COMPOUND_EXPR
590 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 return c_strlen (TREE_OPERAND (src, 1), only_value);
593 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
595 /* Offset from the beginning of the string in bytes. */
596 tree byteoff;
597 src = string_constant (src, &byteoff);
598 if (src == 0)
599 return NULL_TREE;
601 /* Determine the size of the string element. */
602 unsigned eltsize
603 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
605 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
606 length of SRC. */
607 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
609 /* PTR can point to the byte representation of any string type, including
610 char* and wchar_t*. */
611 const char *ptr = TREE_STRING_POINTER (src);
613 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
615 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
616 compute the offset to the following null if we don't know where to
617 start searching for it. */
618 if (string_length (ptr, eltsize, maxelts) < maxelts)
620 /* Return when an embedded null character is found. */
621 return NULL_TREE;
624 if (!maxelts)
625 return ssize_int (0);
627 /* We don't know the starting offset, but we do know that the string
628 has no internal zero bytes. We can assume that the offset falls
629 within the bounds of the string; otherwise, the programmer deserves
630 what he gets. Subtract the offset from the length of the string,
631 and return that. This would perhaps not be valid if we were dealing
632 with named arrays in addition to literal string constants. */
634 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
637 /* Offset from the beginning of the string in elements. */
638 HOST_WIDE_INT eltoff;
640 /* We have a known offset into the string. Start searching there for
641 a null character if we can represent it as a single HOST_WIDE_INT. */
642 if (byteoff == 0)
643 eltoff = 0;
644 else if (! tree_fits_shwi_p (byteoff))
645 eltoff = -1;
646 else
647 eltoff = tree_to_shwi (byteoff) / eltsize;
649 /* If the offset is known to be out of bounds, warn, and call strlen at
650 runtime. */
651 if (eltoff < 0 || eltoff > maxelts)
653 /* Suppress multiple warnings for propagated constant strings. */
654 if (only_value != 2
655 && !TREE_NO_WARNING (src))
657 warning_at (loc, OPT_Warray_bounds,
658 "offset %qwi outside bounds of constant string",
659 eltoff);
660 TREE_NO_WARNING (src) = 1;
662 return NULL_TREE;
665 /* Use strlen to search for the first zero byte. Since any strings
666 constructed with build_string will have nulls appended, we win even
667 if we get handed something like (char[4])"abcd".
669 Since ELTOFF is our starting index into the string, no further
670 calculation is needed. */
671 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
672 maxelts - eltoff);
674 return ssize_int (len);
677 /* Return a constant integer corresponding to the target reading
678 GET_MODE_BITSIZE (MODE) bits from the string constant STR. */
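/* For instance, on a little-endian target c_readstr ("abcd", SImode)
   yields the constant 0x64636261, i.e. the string bytes laid out in
   target memory order; once a null byte is reached the remaining bytes
   of the constant are zero.  */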
680 static rtx
681 c_readstr (const char *str, scalar_int_mode mode)
683 HOST_WIDE_INT ch;
684 unsigned int i, j;
685 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
687 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
688 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
689 / HOST_BITS_PER_WIDE_INT;
691 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
692 for (i = 0; i < len; i++)
693 tmp[i] = 0;
695 ch = 1;
696 for (i = 0; i < GET_MODE_SIZE (mode); i++)
698 j = i;
699 if (WORDS_BIG_ENDIAN)
700 j = GET_MODE_SIZE (mode) - i - 1;
701 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
702 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
703 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
704 j *= BITS_PER_UNIT;
706 if (ch)
707 ch = (unsigned char) str[i];
708 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
711 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
712 return immed_wide_int_const (c, mode);
715 /* Cast a target constant CST to a target CHAR and, if that value fits into
716 the host char type, return zero and put the value into the variable pointed
717 to by P. Otherwise return one. */
719 static int
720 target_char_cast (tree cst, char *p)
722 unsigned HOST_WIDE_INT val, hostval;
724 if (TREE_CODE (cst) != INTEGER_CST
725 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
726 return 1;
728 /* Do not care if it fits or not right here. */
729 val = TREE_INT_CST_LOW (cst);
731 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
732 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
734 hostval = val;
735 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
736 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
738 if (val != hostval)
739 return 1;
741 *p = hostval;
742 return 0;
745 /* Similar to save_expr, but assumes that arbitrary code is not executed
746 in between the multiple evaluations. In particular, we assume that a
747 non-addressable local variable will not be modified. */
749 static tree
750 builtin_save_expr (tree exp)
752 if (TREE_CODE (exp) == SSA_NAME
753 || (TREE_ADDRESSABLE (exp) == 0
754 && (TREE_CODE (exp) == PARM_DECL
755 || (VAR_P (exp) && !TREE_STATIC (exp)))))
756 return exp;
758 return save_expr (exp);
761 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
762 times to get the address of either a higher stack frame, or a return
763 address located within it (depending on FNDECL_CODE). */
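/* This implements both __builtin_return_address (COUNT) and
   __builtin_frame_address (COUNT): e.g. __builtin_return_address (0)
   yields the return address of the current function, while a nonzero
   COUNT first walks COUNT frames up the dynamic chain.  */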
765 static rtx
766 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
768 int i;
769 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
770 if (tem == NULL_RTX)
772 /* For a zero count with __builtin_return_address, we don't care what
773 frame address we return, because target-specific definitions will
774 override us. Therefore frame pointer elimination is OK, and using
775 the soft frame pointer is OK.
777 For a nonzero count, or a zero count with __builtin_frame_address,
778 we require a stable offset from the current frame pointer to the
779 previous one, so we must use the hard frame pointer, and
780 we must disable frame pointer elimination. */
781 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
782 tem = frame_pointer_rtx;
783 else
785 tem = hard_frame_pointer_rtx;
787 /* Tell reload not to eliminate the frame pointer. */
788 crtl->accesses_prior_frames = 1;
792 if (count > 0)
793 SETUP_FRAME_ADDRESSES ();
795 /* On the SPARC, the return address is not in the frame, it is in a
796 register. There is no way to access it off of the current frame
797 pointer, but it can be accessed off the previous frame pointer by
798 reading the value from the register window save area. */
799 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
800 count--;
802 /* Scan back COUNT frames to the specified frame. */
803 for (i = 0; i < count; i++)
805 /* Assume the dynamic chain pointer is in the word that the
806 frame address points to, unless otherwise specified. */
807 tem = DYNAMIC_CHAIN_ADDRESS (tem);
808 tem = memory_address (Pmode, tem);
809 tem = gen_frame_mem (Pmode, tem);
810 tem = copy_to_reg (tem);
813 /* For __builtin_frame_address, return what we've got. But, on
814 the SPARC for example, we may have to add a bias. */
815 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
816 return FRAME_ADDR_RTX (tem);
818 /* For __builtin_return_address, get the return address from that frame. */
819 #ifdef RETURN_ADDR_RTX
820 tem = RETURN_ADDR_RTX (count, tem);
821 #else
822 tem = memory_address (Pmode,
823 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
824 tem = gen_frame_mem (Pmode, tem);
825 #endif
826 return tem;
829 /* Alias set used for setjmp buffer. */
830 static alias_set_type setjmp_alias_set = -1;
832 /* Construct the leading half of a __builtin_setjmp call. Control will
833 return to RECEIVER_LABEL. This is also called directly by the SJLJ
834 exception handling code. */
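/* The layout written at BUF_ADDR is: the frame value from
   targetm.builtin_setjmp_frame_value () in the first Pmode word, the
   address of RECEIVER_LABEL in the second, and the SAVE_NONLOCAL stack
   save area starting at the third word; the size of that save area is
   machine-dependent, which is why a five-word buffer is required.  */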
836 void
837 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
839 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
840 rtx stack_save;
841 rtx mem;
843 if (setjmp_alias_set == -1)
844 setjmp_alias_set = new_alias_set ();
846 buf_addr = convert_memory_address (Pmode, buf_addr);
848 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
850 /* We store the frame pointer and the address of receiver_label in
851 the buffer and use the rest of it for the stack save area, which
852 is machine-dependent. */
854 mem = gen_rtx_MEM (Pmode, buf_addr);
855 set_mem_alias_set (mem, setjmp_alias_set);
856 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
858 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
859 GET_MODE_SIZE (Pmode))),
860 set_mem_alias_set (mem, setjmp_alias_set);
862 emit_move_insn (validize_mem (mem),
863 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
865 stack_save = gen_rtx_MEM (sa_mode,
866 plus_constant (Pmode, buf_addr,
867 2 * GET_MODE_SIZE (Pmode)));
868 set_mem_alias_set (stack_save, setjmp_alias_set);
869 emit_stack_save (SAVE_NONLOCAL, &stack_save);
871 /* If there is further processing to do, do it. */
872 if (targetm.have_builtin_setjmp_setup ())
873 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
875 /* We have a nonlocal label. */
876 cfun->has_nonlocal_label = 1;
879 /* Construct the trailing part of a __builtin_setjmp call. This is
880 also called directly by the SJLJ exception handling code.
881 If RECEIVER_LABEL is NULL, construct a nonlocal goto handler instead. */
883 void
884 expand_builtin_setjmp_receiver (rtx receiver_label)
886 rtx chain;
888 /* Mark the FP as used when we get here, so we have to make sure it's
889 marked as used by this function. */
890 emit_use (hard_frame_pointer_rtx);
892 /* Mark the static chain as clobbered here so life information
893 doesn't get messed up for it. */
894 chain = rtx_for_static_chain (current_function_decl, true);
895 if (chain && REG_P (chain))
896 emit_clobber (chain);
898 /* Now put in the code to restore the frame pointer, and argument
899 pointer, if needed. */
900 if (! targetm.have_nonlocal_goto ())
902 /* First adjust our frame pointer to its actual value. It was
903 previously set to the start of the virtual area corresponding to
904 the stacked variables when we branched here and now needs to be
905 adjusted to the actual hardware fp value.
907 Assignments to virtual registers are converted by
908 instantiate_virtual_regs into the corresponding assignment
909 to the underlying register (fp in this case) that makes
910 the original assignment true.
911 So the following insn will actually be decrementing fp by
912 TARGET_STARTING_FRAME_OFFSET. */
913 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
915 /* Restoring the frame pointer also modifies the hard frame pointer.
916 Mark it used (so that the previous assignment remains live once
917 the frame pointer is eliminated) and clobbered (to represent the
918 implicit update from the assignment). */
919 emit_use (hard_frame_pointer_rtx);
920 emit_clobber (hard_frame_pointer_rtx);
923 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
925 /* If the argument pointer can be eliminated in favor of the
926 frame pointer, we don't need to restore it. We assume here
927 that if such an elimination is present, it can always be used.
928 This is the case on all known machines; if we don't make this
929 assumption, we do unnecessary saving on many machines. */
930 size_t i;
931 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
933 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
934 if (elim_regs[i].from == ARG_POINTER_REGNUM
935 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
936 break;
938 if (i == ARRAY_SIZE (elim_regs))
940 /* Now restore our arg pointer from the address at which it
941 was saved in our stack frame. */
942 emit_move_insn (crtl->args.internal_arg_pointer,
943 copy_to_reg (get_arg_pointer_save_area ()));
947 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
948 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
949 else if (targetm.have_nonlocal_goto_receiver ())
950 emit_insn (targetm.gen_nonlocal_goto_receiver ());
951 else
952 { /* Nothing */ }
954 /* We must not allow the code we just generated to be reordered by
955 scheduling. Specifically, the update of the frame pointer must
956 happen immediately, not later. */
957 emit_insn (gen_blockage ());
960 /* __builtin_longjmp is passed a pointer to an array of five words (not
961 all will be used on all machines). It operates similarly to the C
962 library function of the same name, but is more efficient. Much of
963 the code below is copied from the handling of non-local gotos. */
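/* Typical source-level use pairs the two builtins directly:

     if (__builtin_setjmp (buf) == 0)
       ... normal path ...
     else
       ... reached again via __builtin_longjmp (buf, 1) ...

   where buf is the five-word buffer described above.  The second
   argument to __builtin_longjmp must be the constant 1, as asserted
   below.  */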
965 static void
966 expand_builtin_longjmp (rtx buf_addr, rtx value)
968 rtx fp, lab, stack;
969 rtx_insn *insn, *last;
970 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
972 /* DRAP is needed for stack realignment if longjmp is expanded in the
973 current function. */
974 if (SUPPORTS_STACK_ALIGNMENT)
975 crtl->need_drap = true;
977 if (setjmp_alias_set == -1)
978 setjmp_alias_set = new_alias_set ();
980 buf_addr = convert_memory_address (Pmode, buf_addr);
982 buf_addr = force_reg (Pmode, buf_addr);
984 /* We require that the user must pass a second argument of 1, because
985 that is what builtin_setjmp will return. */
986 gcc_assert (value == const1_rtx);
988 last = get_last_insn ();
989 if (targetm.have_builtin_longjmp ())
990 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
991 else
993 fp = gen_rtx_MEM (Pmode, buf_addr);
994 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
995 GET_MODE_SIZE (Pmode)));
997 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
998 2 * GET_MODE_SIZE (Pmode)));
999 set_mem_alias_set (fp, setjmp_alias_set);
1000 set_mem_alias_set (lab, setjmp_alias_set);
1001 set_mem_alias_set (stack, setjmp_alias_set);
1003 /* Pick up FP, label, and SP from the block and jump. This code is
1004 from expand_goto in stmt.c; see there for detailed comments. */
1005 if (targetm.have_nonlocal_goto ())
1006 /* We have to pass a value to the nonlocal_goto pattern that will
1007 get copied into the static_chain pointer, but it does not matter
1008 what that value is, because builtin_setjmp does not use it. */
1009 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1010 else
1012 lab = copy_to_reg (lab);
1014 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1015 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1017 emit_move_insn (hard_frame_pointer_rtx, fp);
1018 emit_stack_restore (SAVE_NONLOCAL, stack);
1020 emit_use (hard_frame_pointer_rtx);
1021 emit_use (stack_pointer_rtx);
1022 emit_indirect_jump (lab);
1026 /* Search backwards and mark the jump insn as a non-local goto.
1027 Note that this precludes the use of __builtin_longjmp to a
1028 __builtin_setjmp target in the same function. However, we've
1029 already cautioned the user that these functions are for
1030 internal exception handling use only. */
1031 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1033 gcc_assert (insn != last);
1035 if (JUMP_P (insn))
1037 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1038 break;
1040 else if (CALL_P (insn))
1041 break;
1045 static inline bool
1046 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1048 return (iter->i < iter->n);
1051 /* This function validates the types of a function call argument list
1052 against a specified list of tree_codes. If the last specifier is a 0,
1053 that represents an ellipsis, otherwise the last specifier must be a
1054 VOID_TYPE. */
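/* For example, a memcpy-like call is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                       VOID_TYPE)

   whereas ending the list with 0 instead of VOID_TYPE accepts any number
   of further arguments, as for printf-style builtins.  */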
1056 static bool
1057 validate_arglist (const_tree callexpr, ...)
1059 enum tree_code code;
1060 bool res = 0;
1061 va_list ap;
1062 const_call_expr_arg_iterator iter;
1063 const_tree arg;
1065 va_start (ap, callexpr);
1066 init_const_call_expr_arg_iterator (callexpr, &iter);
1068 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1069 tree fn = CALL_EXPR_FN (callexpr);
1070 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1072 for (unsigned argno = 1; ; ++argno)
1074 code = (enum tree_code) va_arg (ap, int);
1076 switch (code)
1078 case 0:
1079 /* This signifies an ellipsis; any further arguments are all OK. */
1080 res = true;
1081 goto end;
1082 case VOID_TYPE:
1083 /* This signifies an endlink; if no arguments remain, return
1084 true, otherwise return false. */
1085 res = !more_const_call_expr_args_p (&iter);
1086 goto end;
1087 case POINTER_TYPE:
1088 /* The actual argument must be nonnull when either the whole
1089 called function has been declared nonnull, or when the formal
1090 argument corresponding to the actual argument has been. */
1091 if (argmap
1092 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1094 arg = next_const_call_expr_arg (&iter);
1095 if (!validate_arg (arg, code) || integer_zerop (arg))
1096 goto end;
1097 break;
1099 /* FALLTHRU */
1100 default:
1101 /* If no parameters remain or the parameter's code does not
1102 match the specified code, return false. Otherwise continue
1103 checking any remaining arguments. */
1104 arg = next_const_call_expr_arg (&iter);
1105 if (!validate_arg (arg, code))
1106 goto end;
1107 break;
1111 /* We need gotos here since we can only have one VA_CLOSE in a
1112 function. */
1113 end: ;
1114 va_end (ap);
1116 BITMAP_FREE (argmap);
1118 return res;
1121 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1122 and the address of the save area. */
1124 static rtx
1125 expand_builtin_nonlocal_goto (tree exp)
1127 tree t_label, t_save_area;
1128 rtx r_label, r_save_area, r_fp, r_sp;
1129 rtx_insn *insn;
1131 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1132 return NULL_RTX;
1134 t_label = CALL_EXPR_ARG (exp, 0);
1135 t_save_area = CALL_EXPR_ARG (exp, 1);
1137 r_label = expand_normal (t_label);
1138 r_label = convert_memory_address (Pmode, r_label);
1139 r_save_area = expand_normal (t_save_area);
1140 r_save_area = convert_memory_address (Pmode, r_save_area);
1141 /* Copy the address of the save location to a register just in case it was
1142 based on the frame pointer. */
1143 r_save_area = copy_to_reg (r_save_area);
1144 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1145 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1146 plus_constant (Pmode, r_save_area,
1147 GET_MODE_SIZE (Pmode)));
1149 crtl->has_nonlocal_goto = 1;
1151 /* ??? We no longer need to pass the static chain value, afaik. */
1152 if (targetm.have_nonlocal_goto ())
1153 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1154 else
1156 r_label = copy_to_reg (r_label);
1158 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1159 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1161 /* Restore frame pointer for containing function. */
1162 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1163 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1165 /* USE of hard_frame_pointer_rtx added for consistency;
1166 not clear if really needed. */
1167 emit_use (hard_frame_pointer_rtx);
1168 emit_use (stack_pointer_rtx);
1170 /* If the architecture is using a GP register, we must
1171 conservatively assume that the target function makes use of it.
1172 The prologue of functions with nonlocal gotos must therefore
1173 initialize the GP register to the appropriate value, and we
1174 must then make sure that this value is live at the point
1175 of the jump. (Note that this doesn't necessarily apply
1176 to targets with a nonlocal_goto pattern; they are free
1177 to implement it in their own way. Note also that this is
1178 a no-op if the GP register is a global invariant.) */
1179 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1180 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1181 emit_use (pic_offset_table_rtx);
1183 emit_indirect_jump (r_label);
1186 /* Search backwards to the jump insn and mark it as a
1187 non-local goto. */
1188 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1190 if (JUMP_P (insn))
1192 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1193 break;
1195 else if (CALL_P (insn))
1196 break;
1199 return const0_rtx;
1202 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1203 (not all will be used on all machines) that was passed to __builtin_setjmp.
1204 It updates the stack pointer in that block to the current value. This is
1205 also called directly by the SJLJ exception handling code. */
1207 void
1208 expand_builtin_update_setjmp_buf (rtx buf_addr)
1210 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1211 buf_addr = convert_memory_address (Pmode, buf_addr);
1212 rtx stack_save
1213 = gen_rtx_MEM (sa_mode,
1214 memory_address
1215 (sa_mode,
1216 plus_constant (Pmode, buf_addr,
1217 2 * GET_MODE_SIZE (Pmode))));
1219 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1222 /* Expand a call to __builtin_prefetch. For a target that does not support
1223 data prefetch, evaluate the memory address argument in case it has side
1224 effects. */
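/* For example, for some array a the call

     __builtin_prefetch (&a[i + 8], 0, 3);

   asks for a read prefetch (second argument 0) with maximum temporal
   locality (third argument 3).  Both optional arguments must be
   compile-time constants, as enforced below.  */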
1226 static void
1227 expand_builtin_prefetch (tree exp)
1229 tree arg0, arg1, arg2;
1230 int nargs;
1231 rtx op0, op1, op2;
1233 if (!validate_arglist (exp, POINTER_TYPE, 0))
1234 return;
1236 arg0 = CALL_EXPR_ARG (exp, 0);
1238 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1239 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1240 locality). */
1241 nargs = call_expr_nargs (exp);
1242 if (nargs > 1)
1243 arg1 = CALL_EXPR_ARG (exp, 1);
1244 else
1245 arg1 = integer_zero_node;
1246 if (nargs > 2)
1247 arg2 = CALL_EXPR_ARG (exp, 2);
1248 else
1249 arg2 = integer_three_node;
1251 /* Argument 0 is an address. */
1252 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1254 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1255 if (TREE_CODE (arg1) != INTEGER_CST)
1257 error ("second argument to %<__builtin_prefetch%> must be a constant");
1258 arg1 = integer_zero_node;
1260 op1 = expand_normal (arg1);
1261 /* Argument 1 must be either zero or one. */
1262 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1264 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1265 " using zero");
1266 op1 = const0_rtx;
1269 /* Argument 2 (locality) must be a compile-time constant int. */
1270 if (TREE_CODE (arg2) != INTEGER_CST)
1272 error ("third argument to %<__builtin_prefetch%> must be a constant");
1273 arg2 = integer_zero_node;
1275 op2 = expand_normal (arg2);
1276 /* Argument 2 must be 0, 1, 2, or 3. */
1277 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1279 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1280 op2 = const0_rtx;
1283 if (targetm.have_prefetch ())
1285 struct expand_operand ops[3];
1287 create_address_operand (&ops[0], op0);
1288 create_integer_operand (&ops[1], INTVAL (op1));
1289 create_integer_operand (&ops[2], INTVAL (op2));
1290 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1291 return;
1294 /* Don't do anything with direct references to volatile memory, but
1295 generate code to handle other side effects. */
1296 if (!MEM_P (op0) && side_effects_p (op0))
1297 emit_insn (op0);
1300 /* Get a MEM rtx for expression EXP which is the address of an operand
1301 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1302 the maximum length of the block of memory that might be accessed or
1303 NULL if unknown. */
1305 static rtx
1306 get_memory_rtx (tree exp, tree len)
1308 tree orig_exp = exp;
1309 rtx addr, mem;
1311 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1312 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1313 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1314 exp = TREE_OPERAND (exp, 0);
1316 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1317 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1319 /* Get an expression we can use to find the attributes to assign to MEM.
1320 First remove any nops. */
1321 while (CONVERT_EXPR_P (exp)
1322 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1323 exp = TREE_OPERAND (exp, 0);
1325 /* Build a MEM_REF representing the whole accessed area as a byte blob
1326 (as builtin stringops may alias with anything). */
1327 exp = fold_build2 (MEM_REF,
1328 build_array_type (char_type_node,
1329 build_range_type (sizetype,
1330 size_one_node, len)),
1331 exp, build_int_cst (ptr_type_node, 0));
1333 /* If the MEM_REF has no acceptable address, try to get the base object
1334 from the original address we got, and build an all-aliasing
1335 unknown-sized access to that one. */
1336 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1337 set_mem_attributes (mem, exp, 0);
1338 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1339 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1340 0))))
1342 exp = build_fold_addr_expr (exp);
1343 exp = fold_build2 (MEM_REF,
1344 build_array_type (char_type_node,
1345 build_range_type (sizetype,
1346 size_zero_node,
1347 NULL)),
1348 exp, build_int_cst (ptr_type_node, 0));
1349 set_mem_attributes (mem, exp, 0);
1351 set_mem_alias_set (mem, 0);
1352 return mem;
1355 /* Built-in functions to perform an untyped call and return. */
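/* At the source level these are used together roughly as follows, where
   fn is some function pointer and 64 is merely an upper bound the caller
   supplies for the size of the pushed argument block:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*)()) fn, args, 64);
     __builtin_return (res);
   */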
1357 #define apply_args_mode \
1358 (this_target_builtins->x_apply_args_mode)
1359 #define apply_result_mode \
1360 (this_target_builtins->x_apply_result_mode)
1362 /* Return the size required for the block returned by __builtin_apply_args,
1363 and initialize apply_args_mode. */
1365 static int
1366 apply_args_size (void)
1368 static int size = -1;
1369 int align;
1370 unsigned int regno;
1372 /* The values computed by this function never change. */
1373 if (size < 0)
1375 /* The first value is the incoming arg-pointer. */
1376 size = GET_MODE_SIZE (Pmode);
1378 /* The second value is the structure value address unless this is
1379 passed as an "invisible" first argument. */
1380 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1381 size += GET_MODE_SIZE (Pmode);
1383 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1384 if (FUNCTION_ARG_REGNO_P (regno))
1386 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1388 gcc_assert (mode != VOIDmode);
1390 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1391 if (size % align != 0)
1392 size = CEIL (size, align) * align;
1393 size += GET_MODE_SIZE (mode);
1394 apply_args_mode[regno] = mode;
1396 else
1398 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1401 return size;
1404 /* Return the size required for the block returned by __builtin_apply,
1405 and initialize apply_result_mode. */
1407 static int
1408 apply_result_size (void)
1410 static int size = -1;
1411 int align, regno;
1413 /* The values computed by this function never change. */
1414 if (size < 0)
1416 size = 0;
1418 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1419 if (targetm.calls.function_value_regno_p (regno))
1421 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1423 gcc_assert (mode != VOIDmode);
1425 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1426 if (size % align != 0)
1427 size = CEIL (size, align) * align;
1428 size += GET_MODE_SIZE (mode);
1429 apply_result_mode[regno] = mode;
1431 else
1432 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1434 /* Allow targets that use untyped_call and untyped_return to override
1435 the size so that machine-specific information can be stored here. */
1436 #ifdef APPLY_RESULT_SIZE
1437 size = APPLY_RESULT_SIZE;
1438 #endif
1440 return size;
1443 /* Create a vector describing the result block RESULT. If SAVEP is true,
1444 the result block is used to save the values; otherwise it is used to
1445 restore the values. */
1447 static rtx
1448 result_vector (int savep, rtx result)
1450 int regno, size, align, nelts;
1451 fixed_size_mode mode;
1452 rtx reg, mem;
1453 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1455 size = nelts = 0;
1456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1457 if ((mode = apply_result_mode[regno]) != VOIDmode)
1459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1460 if (size % align != 0)
1461 size = CEIL (size, align) * align;
1462 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1463 mem = adjust_address (result, mode, size);
1464 savevec[nelts++] = (savep
1465 ? gen_rtx_SET (mem, reg)
1466 : gen_rtx_SET (reg, mem));
1467 size += GET_MODE_SIZE (mode);
1469 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1472 /* Save the state required to perform an untyped call with the same
1473 arguments as were passed to the current function. */
1475 static rtx
1476 expand_builtin_apply_args_1 (void)
1478 rtx registers, tem;
1479 int size, align, regno;
1480 fixed_size_mode mode;
1481 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1483 /* Create a block where the arg-pointer, structure value address,
1484 and argument registers can be saved. */
1485 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1487 /* Walk past the arg-pointer and structure value address. */
1488 size = GET_MODE_SIZE (Pmode);
1489 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1490 size += GET_MODE_SIZE (Pmode);
1492 /* Save each register used in calling a function to the block. */
1493 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1494 if ((mode = apply_args_mode[regno]) != VOIDmode)
1496 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1497 if (size % align != 0)
1498 size = CEIL (size, align) * align;
1500 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1502 emit_move_insn (adjust_address (registers, mode, size), tem);
1503 size += GET_MODE_SIZE (mode);
1506 /* Save the arg pointer to the block. */
1507 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1508 /* We need the pointer as the caller actually passed it to us, not
1509 as we might have pretended it was passed. Make sure it's a valid
1510 operand, as emit_move_insn isn't expected to handle a PLUS. */
1511 if (STACK_GROWS_DOWNWARD)
1512 tem
1513 = force_operand (plus_constant (Pmode, tem,
1514 crtl->args.pretend_args_size),
1515 NULL_RTX);
1516 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1518 size = GET_MODE_SIZE (Pmode);
1520 /* Save the structure value address unless this is passed as an
1521 "invisible" first argument. */
1522 if (struct_incoming_value)
1524 emit_move_insn (adjust_address (registers, Pmode, size),
1525 copy_to_reg (struct_incoming_value));
1526 size += GET_MODE_SIZE (Pmode);
1529 /* Return the address of the block. */
1530 return copy_addr_to_reg (XEXP (registers, 0));
1533 /* __builtin_apply_args returns a block of memory allocated on
1534 the stack into which are stored the arg pointer, structure
1535 value address, static chain, and all the registers that might
1536 possibly be used in performing a function call. The code is
1537 moved to the start of the function so the incoming values are
1538 saved. */
1540 static rtx
1541 expand_builtin_apply_args (void)
1543 /* Don't do __builtin_apply_args more than once in a function.
1544 Save the result of the first call and reuse it. */
1545 if (apply_args_value != 0)
1546 return apply_args_value;
1548 /* When this function is called, it means that registers must be
1549 saved on entry to this function. So we migrate the
1550 call to the first insn of this function. */
1551 rtx temp;
1553 start_sequence ();
1554 temp = expand_builtin_apply_args_1 ();
1555 rtx_insn *seq = get_insns ();
1556 end_sequence ();
1558 apply_args_value = temp;
1560 /* Put the insns after the NOTE that starts the function.
1561 If this is inside a start_sequence, make the outer-level insn
1562 chain current, so the code is placed at the start of the
1563 function. If internal_arg_pointer is a non-virtual pseudo,
1564 it needs to be placed after the function that initializes
1565 that pseudo. */
1566 push_topmost_sequence ();
1567 if (REG_P (crtl->args.internal_arg_pointer)
1568 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1569 emit_insn_before (seq, parm_birth_insn);
1570 else
1571 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1572 pop_topmost_sequence ();
1573 return temp;
1577 /* Perform an untyped call and save the state required to perform an
1578 untyped return of whatever value was returned by the given function. */
1580 static rtx
1581 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1583 int size, align, regno;
1584 fixed_size_mode mode;
1585 rtx incoming_args, result, reg, dest, src;
1586 rtx_call_insn *call_insn;
1587 rtx old_stack_level = 0;
1588 rtx call_fusage = 0;
1589 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1591 arguments = convert_memory_address (Pmode, arguments);
1593 /* Create a block where the return registers can be saved. */
1594 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1596 /* Fetch the arg pointer from the ARGUMENTS block. */
1597 incoming_args = gen_reg_rtx (Pmode);
1598 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1599 if (!STACK_GROWS_DOWNWARD)
1600 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1601 incoming_args, 0, OPTAB_LIB_WIDEN);
1603 /* Push a new argument block and copy the arguments. Do not allow
1604 the (potential) memcpy call below to interfere with our stack
1605 manipulations. */
1606 do_pending_stack_adjust ();
1607 NO_DEFER_POP;
1609 /* Save the stack with nonlocal if available. */
1610 if (targetm.have_save_stack_nonlocal ())
1611 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1612 else
1613 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1615 /* Allocate a block of memory onto the stack and copy the memory
1616 arguments to the outgoing arguments address. We can pass TRUE
1617 as the 4th argument because we just saved the stack pointer
1618 and will restore it right after the call. */
1619 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1621 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1622 may have already set current_function_calls_alloca to true.
1623 current_function_calls_alloca won't be set if argsize is zero,
1624 so we have to guarantee need_drap is true here. */
1625 if (SUPPORTS_STACK_ALIGNMENT)
1626 crtl->need_drap = true;
1628 dest = virtual_outgoing_args_rtx;
1629 if (!STACK_GROWS_DOWNWARD)
1631 if (CONST_INT_P (argsize))
1632 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1633 else
1634 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1636 dest = gen_rtx_MEM (BLKmode, dest);
1637 set_mem_align (dest, PARM_BOUNDARY);
1638 src = gen_rtx_MEM (BLKmode, incoming_args);
1639 set_mem_align (src, PARM_BOUNDARY);
1640 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1642 /* Refer to the argument block. */
1643 apply_args_size ();
1644 arguments = gen_rtx_MEM (BLKmode, arguments);
1645 set_mem_align (arguments, PARM_BOUNDARY);
1647 /* Walk past the arg-pointer and structure value address. */
1648 size = GET_MODE_SIZE (Pmode);
1649 if (struct_value)
1650 size += GET_MODE_SIZE (Pmode);
1652 /* Restore each of the registers previously saved. Make USE insns
1653 for each of these registers for use in making the call. */
1654 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1655 if ((mode = apply_args_mode[regno]) != VOIDmode)
1657 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1658 if (size % align != 0)
1659 size = CEIL (size, align) * align;
1660 reg = gen_rtx_REG (mode, regno);
1661 emit_move_insn (reg, adjust_address (arguments, mode, size));
1662 use_reg (&call_fusage, reg);
1663 size += GET_MODE_SIZE (mode);
1666 /* Restore the structure value address unless this is passed as an
1667 "invisible" first argument. */
1668 size = GET_MODE_SIZE (Pmode);
1669 if (struct_value)
1671 rtx value = gen_reg_rtx (Pmode);
1672 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1673 emit_move_insn (struct_value, value);
1674 if (REG_P (struct_value))
1675 use_reg (&call_fusage, struct_value);
1676 size += GET_MODE_SIZE (Pmode);
1679 /* All arguments and registers used for the call are set up by now! */
1680 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1682 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1683 and we don't want to load it into a register as an optimization,
1684 because prepare_call_address already did it if it should be done. */
1685 if (GET_CODE (function) != SYMBOL_REF)
1686 function = memory_address (FUNCTION_MODE, function);
1688 /* Generate the actual call instruction and save the return value. */
1689 if (targetm.have_untyped_call ())
1691 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1692 emit_call_insn (targetm.gen_untyped_call (mem, result,
1693 result_vector (1, result)));
1695 else if (targetm.have_call_value ())
1697 rtx valreg = 0;
1699 /* Locate the unique return register. It is not possible to
1700 express a call that sets more than one return register using
1701 call_value; use untyped_call for that. In fact, untyped_call
1702 only needs to save the return registers in the given block. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704 if ((mode = apply_result_mode[regno]) != VOIDmode)
1706 gcc_assert (!valreg); /* have_untyped_call required. */
1708 valreg = gen_rtx_REG (mode, regno);
1711 emit_insn (targetm.gen_call_value (valreg,
1712 gen_rtx_MEM (FUNCTION_MODE, function),
1713 const0_rtx, NULL_RTX, const0_rtx));
1715 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1717 else
1718 gcc_unreachable ();
1720 /* Find the CALL insn we just emitted, and attach the register usage
1721 information. */
1722 call_insn = last_call_insn ();
1723 add_function_usage_to (call_insn, call_fusage);
1725 /* Restore the stack. */
1726 if (targetm.have_save_stack_nonlocal ())
1727 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1728 else
1729 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1730 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1732 OK_DEFER_POP;
1734 /* Return the address of the result block. */
1735 result = copy_addr_to_reg (XEXP (result, 0));
1736 return convert_memory_address (ptr_mode, result);
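/* A rough usage sketch of the extension expanded above (names chosen for
   illustration only):

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*)()) other_fn, args, 64);
     __builtin_return (res);

   The third argument is a caller-supplied upper bound on the size of the
   argument block; the result-block address returned here is what
   __builtin_return later consumes.  */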
1739 /* Perform an untyped return. */
1741 static void
1742 expand_builtin_return (rtx result)
1744 int size, align, regno;
1745 fixed_size_mode mode;
1746 rtx reg;
1747 rtx_insn *call_fusage = 0;
1749 result = convert_memory_address (Pmode, result);
1751 apply_result_size ();
1752 result = gen_rtx_MEM (BLKmode, result);
1754 if (targetm.have_untyped_return ())
1756 rtx vector = result_vector (0, result);
1757 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1758 emit_barrier ();
1759 return;
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1771 emit_move_insn (reg, adjust_address (result, mode, size));
1773 push_to_sequence (call_fusage);
1774 emit_use (reg);
1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1780 /* Put the USE insns before the return. */
1781 emit_insn (call_fusage);
1783 /* Return whatever values were restored by jumping directly to the end
1784 of the function. */
1785 expand_naked_return ();
1788 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1790 static enum type_class
1791 type_to_class (tree type)
1793 switch (TREE_CODE (type))
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1816 /* Expand a call EXP to __builtin_classify_type. */
1818 static rtx
1819 expand_builtin_classify_type (tree exp)
1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1823 return GEN_INT (no_type_class);
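/* For illustration: __builtin_classify_type (42) folds to
   integer_type_class and __builtin_classify_type (3.14) to real_type_class;
   the enumerator values themselves are defined in typeclass.h.  */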
1826 /* This helper macro, meant to be used in mathfn_built_in below, determines
1827 which among a set of builtin math functions is appropriate for a given type
1828 mode. The `F' (float) and `L' (long double) are automatically generated
1829 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1830 types, there are additional types that are considered with 'F32', 'F64',
1831 'F128', etc. suffixes. */
1832 #define CASE_MATHFN(MATHFN) \
1833 CASE_CFN_##MATHFN: \
1834 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1835 fcodel = BUILT_IN_##MATHFN##L ; break;
1836 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1837 types. */
1838 #define CASE_MATHFN_FLOATN(MATHFN) \
1839 CASE_CFN_##MATHFN: \
1840 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1841 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1842 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1843 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1844 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1845 break;
1846 /* Similar to above, but appends _R after any F/L suffix. */
1847 #define CASE_MATHFN_REENT(MATHFN) \
1848 case CFN_BUILT_IN_##MATHFN##_R: \
1849 case CFN_BUILT_IN_##MATHFN##F_R: \
1850 case CFN_BUILT_IN_##MATHFN##L_R: \
1851 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1852 fcodel = BUILT_IN_##MATHFN##L_R ; break;
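/* As a concrete sketch, CASE_MATHFN (SIN) expands to roughly

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single case records the double, float and long double codes that
   the type dispatch at the end of mathfn_built_in_2 selects between.  */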
1854 /* Return a function equivalent to FN but operating on floating-point
1855 values of type TYPE, or END_BUILTINS if no such function exists.
1856 This is purely an operation on function codes; it does not guarantee
1857 that the target actually has an implementation of the function. */
1859 static built_in_function
1860 mathfn_built_in_2 (tree type, combined_fn fn)
1862 tree mtype;
1863 built_in_function fcode, fcodef, fcodel;
1864 built_in_function fcodef16 = END_BUILTINS;
1865 built_in_function fcodef32 = END_BUILTINS;
1866 built_in_function fcodef64 = END_BUILTINS;
1867 built_in_function fcodef128 = END_BUILTINS;
1868 built_in_function fcodef32x = END_BUILTINS;
1869 built_in_function fcodef64x = END_BUILTINS;
1870 built_in_function fcodef128x = END_BUILTINS;
1872 switch (fn)
1874 CASE_MATHFN (ACOS)
1875 CASE_MATHFN (ACOSH)
1876 CASE_MATHFN (ASIN)
1877 CASE_MATHFN (ASINH)
1878 CASE_MATHFN (ATAN)
1879 CASE_MATHFN (ATAN2)
1880 CASE_MATHFN (ATANH)
1881 CASE_MATHFN (CBRT)
1882 CASE_MATHFN_FLOATN (CEIL)
1883 CASE_MATHFN (CEXPI)
1884 CASE_MATHFN_FLOATN (COPYSIGN)
1885 CASE_MATHFN (COS)
1886 CASE_MATHFN (COSH)
1887 CASE_MATHFN (DREM)
1888 CASE_MATHFN (ERF)
1889 CASE_MATHFN (ERFC)
1890 CASE_MATHFN (EXP)
1891 CASE_MATHFN (EXP10)
1892 CASE_MATHFN (EXP2)
1893 CASE_MATHFN (EXPM1)
1894 CASE_MATHFN (FABS)
1895 CASE_MATHFN (FDIM)
1896 CASE_MATHFN_FLOATN (FLOOR)
1897 CASE_MATHFN_FLOATN (FMA)
1898 CASE_MATHFN_FLOATN (FMAX)
1899 CASE_MATHFN_FLOATN (FMIN)
1900 CASE_MATHFN (FMOD)
1901 CASE_MATHFN (FREXP)
1902 CASE_MATHFN (GAMMA)
1903 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1904 CASE_MATHFN (HUGE_VAL)
1905 CASE_MATHFN (HYPOT)
1906 CASE_MATHFN (ILOGB)
1907 CASE_MATHFN (ICEIL)
1908 CASE_MATHFN (IFLOOR)
1909 CASE_MATHFN (INF)
1910 CASE_MATHFN (IRINT)
1911 CASE_MATHFN (IROUND)
1912 CASE_MATHFN (ISINF)
1913 CASE_MATHFN (J0)
1914 CASE_MATHFN (J1)
1915 CASE_MATHFN (JN)
1916 CASE_MATHFN (LCEIL)
1917 CASE_MATHFN (LDEXP)
1918 CASE_MATHFN (LFLOOR)
1919 CASE_MATHFN (LGAMMA)
1920 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1921 CASE_MATHFN (LLCEIL)
1922 CASE_MATHFN (LLFLOOR)
1923 CASE_MATHFN (LLRINT)
1924 CASE_MATHFN (LLROUND)
1925 CASE_MATHFN (LOG)
1926 CASE_MATHFN (LOG10)
1927 CASE_MATHFN (LOG1P)
1928 CASE_MATHFN (LOG2)
1929 CASE_MATHFN (LOGB)
1930 CASE_MATHFN (LRINT)
1931 CASE_MATHFN (LROUND)
1932 CASE_MATHFN (MODF)
1933 CASE_MATHFN (NAN)
1934 CASE_MATHFN (NANS)
1935 CASE_MATHFN_FLOATN (NEARBYINT)
1936 CASE_MATHFN (NEXTAFTER)
1937 CASE_MATHFN (NEXTTOWARD)
1938 CASE_MATHFN (POW)
1939 CASE_MATHFN (POWI)
1940 CASE_MATHFN (POW10)
1941 CASE_MATHFN (REMAINDER)
1942 CASE_MATHFN (REMQUO)
1943 CASE_MATHFN_FLOATN (RINT)
1944 CASE_MATHFN_FLOATN (ROUND)
1945 CASE_MATHFN (SCALB)
1946 CASE_MATHFN (SCALBLN)
1947 CASE_MATHFN (SCALBN)
1948 CASE_MATHFN (SIGNBIT)
1949 CASE_MATHFN (SIGNIFICAND)
1950 CASE_MATHFN (SIN)
1951 CASE_MATHFN (SINCOS)
1952 CASE_MATHFN (SINH)
1953 CASE_MATHFN_FLOATN (SQRT)
1954 CASE_MATHFN (TAN)
1955 CASE_MATHFN (TANH)
1956 CASE_MATHFN (TGAMMA)
1957 CASE_MATHFN_FLOATN (TRUNC)
1958 CASE_MATHFN (Y0)
1959 CASE_MATHFN (Y1)
1960 CASE_MATHFN (YN)
1962 default:
1963 return END_BUILTINS;
1966 mtype = TYPE_MAIN_VARIANT (type);
1967 if (mtype == double_type_node)
1968 return fcode;
1969 else if (mtype == float_type_node)
1970 return fcodef;
1971 else if (mtype == long_double_type_node)
1972 return fcodel;
1973 else if (mtype == float16_type_node)
1974 return fcodef16;
1975 else if (mtype == float32_type_node)
1976 return fcodef32;
1977 else if (mtype == float64_type_node)
1978 return fcodef64;
1979 else if (mtype == float128_type_node)
1980 return fcodef128;
1981 else if (mtype == float32x_type_node)
1982 return fcodef32x;
1983 else if (mtype == float64x_type_node)
1984 return fcodef64x;
1985 else if (mtype == float128x_type_node)
1986 return fcodef128x;
1987 else
1988 return END_BUILTINS;
1991 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1992 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1993 otherwise use the explicit declaration. If we can't do the conversion,
1994 return null. */
1996 static tree
1997 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1999 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2000 if (fcode2 == END_BUILTINS)
2001 return NULL_TREE;
2003 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2004 return NULL_TREE;
2006 return builtin_decl_explicit (fcode2);
2009 /* Like mathfn_built_in_1, but always use the implicit array. */
2011 tree
2012 mathfn_built_in (tree type, combined_fn fn)
2014 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2017 /* Like mathfn_built_in_1, but take a built_in_function and
2018 always use the implicit array. */
2020 tree
2021 mathfn_built_in (tree type, enum built_in_function fn)
2023 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
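/* Usage sketch: mathfn_built_in (float_type_node, BUILT_IN_SQRT) returns
   the declaration of the float variant (sqrtf) when the implicit builtin
   is available, and NULL_TREE otherwise; the combined_fn overload behaves
   the same way for CFN_SQRT.  */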
2026 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2027 return its code, otherwise return IFN_LAST. Note that this function
2028 only tests whether the function is defined in internals.def, not whether
2029 it is actually available on the target. */
2031 internal_fn
2032 associated_internal_fn (tree fndecl)
2034 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2035 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2036 switch (DECL_FUNCTION_CODE (fndecl))
2038 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2039 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2040 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2041 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2042 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2043 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2044 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2045 #include "internal-fn.def"
2047 CASE_FLT_FN (BUILT_IN_POW10):
2048 return IFN_EXP10;
2050 CASE_FLT_FN (BUILT_IN_DREM):
2051 return IFN_REMAINDER;
2053 CASE_FLT_FN (BUILT_IN_SCALBN):
2054 CASE_FLT_FN (BUILT_IN_SCALBLN):
2055 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2056 return IFN_LDEXP;
2057 return IFN_LAST;
2059 default:
2060 return IFN_LAST;
2064 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2065 on the current target by a call to an internal function, return the
2066 code of that internal function, otherwise return IFN_LAST. The caller
2067 is responsible for ensuring that any side-effects of the built-in
2068 call are dealt with correctly. E.g. if CALL sets errno, the caller
2069 must decide that the errno result isn't needed or make it available
2070 in some other way. */
2072 internal_fn
2073 replacement_internal_fn (gcall *call)
2075 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2077 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2078 if (ifn != IFN_LAST)
2080 tree_pair types = direct_internal_fn_types (ifn, call);
2081 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2082 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2083 return ifn;
2086 return IFN_LAST;
2089 /* Expand a call to the builtin trinary math functions (fma).
2090 Return NULL_RTX if a normal call should be emitted rather than expanding the
2091 function in-line. EXP is the expression that is a call to the builtin
2092 function; if convenient, the result should be placed in TARGET.
2093 SUBTARGET may be used as the target for computing one of EXP's
2094 operands. */
2096 static rtx
2097 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2099 optab builtin_optab;
2100 rtx op0, op1, op2, result;
2101 rtx_insn *insns;
2102 tree fndecl = get_callee_fndecl (exp);
2103 tree arg0, arg1, arg2;
2104 machine_mode mode;
2106 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2107 return NULL_RTX;
2109 arg0 = CALL_EXPR_ARG (exp, 0);
2110 arg1 = CALL_EXPR_ARG (exp, 1);
2111 arg2 = CALL_EXPR_ARG (exp, 2);
2113 switch (DECL_FUNCTION_CODE (fndecl))
2115 CASE_FLT_FN (BUILT_IN_FMA):
2116 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2117 builtin_optab = fma_optab; break;
2118 default:
2119 gcc_unreachable ();
2122 /* Make a suitable register to place result in. */
2123 mode = TYPE_MODE (TREE_TYPE (exp));
2125 /* Before working hard, check whether the instruction is available. */
2126 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2127 return NULL_RTX;
2129 result = gen_reg_rtx (mode);
2131 /* Always stabilize the argument list. */
2132 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2133 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2134 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2136 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2137 op1 = expand_normal (arg1);
2138 op2 = expand_normal (arg2);
2140 start_sequence ();
2142 /* Compute into RESULT.
2143 Set RESULT to wherever the result comes back. */
2144 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2145 result, 0);
2147 /* If we were unable to expand via the builtin, stop the sequence
2148 (without outputting the insns) and call the library function
2149 with the stabilized argument list. */
2150 if (result == 0)
2152 end_sequence ();
2153 return expand_call (exp, target, target == const0_rtx);
2156 /* Output the entire sequence. */
2157 insns = get_insns ();
2158 end_sequence ();
2159 emit_insn (insns);
2161 return result;
2164 /* Expand a call to the builtin sin and cos math functions.
2165 Return NULL_RTX if a normal call should be emitted rather than expanding the
2166 function in-line. EXP is the expression that is a call to the builtin
2167 function; if convenient, the result should be placed in TARGET.
2168 SUBTARGET may be used as the target for computing one of EXP's
2169 operands. */
2171 static rtx
2172 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2174 optab builtin_optab;
2175 rtx op0;
2176 rtx_insn *insns;
2177 tree fndecl = get_callee_fndecl (exp);
2178 machine_mode mode;
2179 tree arg;
2181 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2182 return NULL_RTX;
2184 arg = CALL_EXPR_ARG (exp, 0);
2186 switch (DECL_FUNCTION_CODE (fndecl))
2188 CASE_FLT_FN (BUILT_IN_SIN):
2189 CASE_FLT_FN (BUILT_IN_COS):
2190 builtin_optab = sincos_optab; break;
2191 default:
2192 gcc_unreachable ();
2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
2198 /* Check if the sincos insn is available; otherwise fall back
2199 to the sin or cos insn. */
2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2201 switch (DECL_FUNCTION_CODE (fndecl))
2203 CASE_FLT_FN (BUILT_IN_SIN):
2204 builtin_optab = sin_optab; break;
2205 CASE_FLT_FN (BUILT_IN_COS):
2206 builtin_optab = cos_optab; break;
2207 default:
2208 gcc_unreachable ();
2211 /* Before working hard, check whether the instruction is available. */
2212 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2214 rtx result = gen_reg_rtx (mode);
2216 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2217 need to expand the argument again. This way, we will not perform
2218 side-effects more than once. */
2219 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2221 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2223 start_sequence ();
2225 /* Compute into RESULT.
2226 Set RESULT to wherever the result comes back. */
2227 if (builtin_optab == sincos_optab)
2229 int ok;
2231 switch (DECL_FUNCTION_CODE (fndecl))
2233 CASE_FLT_FN (BUILT_IN_SIN):
2234 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2235 break;
2236 CASE_FLT_FN (BUILT_IN_COS):
2237 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2238 break;
2239 default:
2240 gcc_unreachable ();
2242 gcc_assert (ok);
2244 else
2245 result = expand_unop (mode, builtin_optab, op0, result, 0);
2247 if (result != 0)
2249 /* Output the entire sequence. */
2250 insns = get_insns ();
2251 end_sequence ();
2252 emit_insn (insns);
2253 return result;
2256 /* If we were unable to expand via the builtin, stop the sequence
2257 (without outputting the insns) and call the library function
2258 with the stabilized argument list. */
2259 end_sequence ();
2262 return expand_call (exp, target, target == const0_rtx);
2265 /* Given an interclass math builtin decl FNDECL and its argument ARG
2266 return an RTL instruction code that implements the functionality.
2267 If that isn't possible or available return CODE_FOR_nothing. */
2269 static enum insn_code
2270 interclass_mathfn_icode (tree arg, tree fndecl)
2272 bool errno_set = false;
2273 optab builtin_optab = unknown_optab;
2274 machine_mode mode;
2276 switch (DECL_FUNCTION_CODE (fndecl))
2278 CASE_FLT_FN (BUILT_IN_ILOGB):
2279 errno_set = true; builtin_optab = ilogb_optab; break;
2280 CASE_FLT_FN (BUILT_IN_ISINF):
2281 builtin_optab = isinf_optab; break;
2282 case BUILT_IN_ISNORMAL:
2283 case BUILT_IN_ISFINITE:
2284 CASE_FLT_FN (BUILT_IN_FINITE):
2285 case BUILT_IN_FINITED32:
2286 case BUILT_IN_FINITED64:
2287 case BUILT_IN_FINITED128:
2288 case BUILT_IN_ISINFD32:
2289 case BUILT_IN_ISINFD64:
2290 case BUILT_IN_ISINFD128:
2291 /* These builtins have no optabs (yet). */
2292 break;
2293 default:
2294 gcc_unreachable ();
2297 /* There's no easy way to detect the case we need to set EDOM. */
2298 if (flag_errno_math && errno_set)
2299 return CODE_FOR_nothing;
2301 /* Optab mode depends on the mode of the input argument. */
2302 mode = TYPE_MODE (TREE_TYPE (arg));
2304 if (builtin_optab)
2305 return optab_handler (builtin_optab, mode);
2306 return CODE_FOR_nothing;
2309 /* Expand a call to one of the builtin math functions that operate on
2310 a floating-point argument and output an integer result (ilogb, isinf,
2311 isnan, etc.).
2312 Return 0 if a normal call should be emitted rather than expanding the
2313 function in-line. EXP is the expression that is a call to the builtin
2314 function; if convenient, the result should be placed in TARGET. */
2316 static rtx
2317 expand_builtin_interclass_mathfn (tree exp, rtx target)
2319 enum insn_code icode = CODE_FOR_nothing;
2320 rtx op0;
2321 tree fndecl = get_callee_fndecl (exp);
2322 machine_mode mode;
2323 tree arg;
2325 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2326 return NULL_RTX;
2328 arg = CALL_EXPR_ARG (exp, 0);
2329 icode = interclass_mathfn_icode (arg, fndecl);
2330 mode = TYPE_MODE (TREE_TYPE (arg));
2332 if (icode != CODE_FOR_nothing)
2334 struct expand_operand ops[1];
2335 rtx_insn *last = get_last_insn ();
2336 tree orig_arg = arg;
2338 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2339 need to expand the argument again. This way, we will not perform
2340 side-effects more than once. */
2341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2343 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2345 if (mode != GET_MODE (op0))
2346 op0 = convert_to_mode (mode, op0, 0);
2348 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2349 if (maybe_legitimize_operands (icode, 0, 1, ops)
2350 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2351 return ops[0].value;
2353 delete_insns_since (last);
2354 CALL_EXPR_ARG (exp, 0) = orig_arg;
2357 return NULL_RTX;
2360 /* Expand a call to the builtin sincos math function.
2361 Return NULL_RTX if a normal call should be emitted rather than expanding the
2362 function in-line. EXP is the expression that is a call to the builtin
2363 function. */
2365 static rtx
2366 expand_builtin_sincos (tree exp)
2368 rtx op0, op1, op2, target1, target2;
2369 machine_mode mode;
2370 tree arg, sinp, cosp;
2371 int result;
2372 location_t loc = EXPR_LOCATION (exp);
2373 tree alias_type, alias_off;
2375 if (!validate_arglist (exp, REAL_TYPE,
2376 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2377 return NULL_RTX;
2379 arg = CALL_EXPR_ARG (exp, 0);
2380 sinp = CALL_EXPR_ARG (exp, 1);
2381 cosp = CALL_EXPR_ARG (exp, 2);
2383 /* Make a suitable register to place result in. */
2384 mode = TYPE_MODE (TREE_TYPE (arg));
2386 /* Check if sincos insn is available, otherwise emit the call. */
2387 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2388 return NULL_RTX;
2390 target1 = gen_reg_rtx (mode);
2391 target2 = gen_reg_rtx (mode);
2393 op0 = expand_normal (arg);
2394 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2395 alias_off = build_int_cst (alias_type, 0);
2396 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2397 sinp, alias_off));
2398 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2399 cosp, alias_off));
2401 /* Compute into target1 and target2.
2402 Set TARGET to wherever the result comes back. */
2403 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2404 gcc_assert (result);
2406 /* Move target1 and target2 to the memory locations indicated
2407 by op1 and op2. */
2408 emit_move_insn (op1, target1);
2409 emit_move_insn (op2, target2);
2411 return const0_rtx;
2414 /* Expand a call to the internal cexpi builtin to the sincos math function.
2415 EXP is the expression that is a call to the builtin function; if convenient,
2416 the result should be placed in TARGET. */
2418 static rtx
2419 expand_builtin_cexpi (tree exp, rtx target)
2421 tree fndecl = get_callee_fndecl (exp);
2422 tree arg, type;
2423 machine_mode mode;
2424 rtx op0, op1, op2;
2425 location_t loc = EXPR_LOCATION (exp);
2427 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2428 return NULL_RTX;
2430 arg = CALL_EXPR_ARG (exp, 0);
2431 type = TREE_TYPE (arg);
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2434 /* Try expanding via a sincos optab, fall back to emitting a libcall
2435 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2436 is only generated from sincos or cexp, or if we have either of them. */
2437 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2439 op1 = gen_reg_rtx (mode);
2440 op2 = gen_reg_rtx (mode);
2442 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2444 /* Compute into op1 and op2. */
2445 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2447 else if (targetm.libc_has_function (function_sincos))
2449 tree call, fn = NULL_TREE;
2450 tree top1, top2;
2451 rtx op1a, op2a;
2453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2454 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2455 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2456 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2457 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2458 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2459 else
2460 gcc_unreachable ();
2462 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2463 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2464 op1a = copy_addr_to_reg (XEXP (op1, 0));
2465 op2a = copy_addr_to_reg (XEXP (op2, 0));
2466 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2467 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2469 /* Make sure not to fold the sincos call again. */
2470 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2471 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2472 call, 3, arg, top1, top2));
2474 else
2476 tree call, fn = NULL_TREE, narg;
2477 tree ctype = build_complex_type (type);
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2480 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2482 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2484 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2485 else
2486 gcc_unreachable ();
2488 /* If we don't have a decl for cexp create one. This is the
2489 friendliest fallback if the user calls __builtin_cexpi
2490 without full C99 function support on the target. */
2491 if (fn == NULL_TREE)
2493 tree fntype;
2494 const char *name = NULL;
2496 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2497 name = "cexpf";
2498 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2499 name = "cexp";
2500 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2501 name = "cexpl";
2503 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2504 fn = build_fn_decl (name, fntype);
2507 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2508 build_real (type, dconst0), arg);
2510 /* Make sure not to fold the cexp call again. */
2511 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2512 return expand_expr (build_call_nary (ctype, call, 1, narg),
2513 target, VOIDmode, EXPAND_NORMAL);
2516 /* Now build the proper return type. */
2517 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2518 make_tree (TREE_TYPE (arg), op2),
2519 make_tree (TREE_TYPE (arg), op1)),
2520 target, VOIDmode, EXPAND_NORMAL);
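/* Sketch of the fallback strategies above for __builtin_cexpi (x): with a
   sincos optab the two halves come from expand_twoval_unop; otherwise the
   call is re-expanded as roughly

     sincos (x, &im, &re);

   when libc provides sincos, and as a cexp call on the complex value
   COMPLEX_EXPR <0.0, x> as the last resort.  */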
2523 /* Conveniently construct a function call expression. FNDECL names the
2524 function to be called, N is the number of arguments, and the "..."
2525 parameters are the argument expressions. Unlike build_call_expr
2526 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2528 static tree
2529 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2531 va_list ap;
2532 tree fntype = TREE_TYPE (fndecl);
2533 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2535 va_start (ap, n);
2536 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2537 va_end (ap);
2538 SET_EXPR_LOCATION (fn, loc);
2539 return fn;
2542 /* Expand a call to one of the builtin rounding functions gcc defines
2543 as an extension (lfloor and lceil). As these are gcc extensions we
2544 do not need to worry about setting errno to EDOM.
2545 If expanding via optab fails, lower expression to (int)(floor(x)).
2546 EXP is the expression that is a call to the builtin function;
2547 if convenient, the result should be placed in TARGET. */
2549 static rtx
2550 expand_builtin_int_roundingfn (tree exp, rtx target)
2552 convert_optab builtin_optab;
2553 rtx op0, tmp;
2554 rtx_insn *insns;
2555 tree fndecl = get_callee_fndecl (exp);
2556 enum built_in_function fallback_fn;
2557 tree fallback_fndecl;
2558 machine_mode mode;
2559 tree arg;
2561 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2562 gcc_unreachable ();
2564 arg = CALL_EXPR_ARG (exp, 0);
2566 switch (DECL_FUNCTION_CODE (fndecl))
2568 CASE_FLT_FN (BUILT_IN_ICEIL):
2569 CASE_FLT_FN (BUILT_IN_LCEIL):
2570 CASE_FLT_FN (BUILT_IN_LLCEIL):
2571 builtin_optab = lceil_optab;
2572 fallback_fn = BUILT_IN_CEIL;
2573 break;
2575 CASE_FLT_FN (BUILT_IN_IFLOOR):
2576 CASE_FLT_FN (BUILT_IN_LFLOOR):
2577 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2578 builtin_optab = lfloor_optab;
2579 fallback_fn = BUILT_IN_FLOOR;
2580 break;
2582 default:
2583 gcc_unreachable ();
2586 /* Make a suitable register to place result in. */
2587 mode = TYPE_MODE (TREE_TYPE (exp));
2589 target = gen_reg_rtx (mode);
2591 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2592 need to expand the argument again. This way, we will not perform
2593 side-effects more than once. */
2594 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2596 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2598 start_sequence ();
2600 /* Compute into TARGET. */
2601 if (expand_sfix_optab (target, op0, builtin_optab))
2603 /* Output the entire sequence. */
2604 insns = get_insns ();
2605 end_sequence ();
2606 emit_insn (insns);
2607 return target;
2610 /* If we were unable to expand via the builtin, stop the sequence
2611 (without outputting the insns). */
2612 end_sequence ();
2614 /* Fall back to floating point rounding optab. */
2615 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2617 /* For non-C99 targets we may end up without a fallback fndecl here
2618 if the user called __builtin_lfloor directly. In this case emit
2619 a call to the floor/ceil variants nevertheless. This should result
2620 in the best user experience for targets without full C99 support. */
2621 if (fallback_fndecl == NULL_TREE)
2623 tree fntype;
2624 const char *name = NULL;
2626 switch (DECL_FUNCTION_CODE (fndecl))
2628 case BUILT_IN_ICEIL:
2629 case BUILT_IN_LCEIL:
2630 case BUILT_IN_LLCEIL:
2631 name = "ceil";
2632 break;
2633 case BUILT_IN_ICEILF:
2634 case BUILT_IN_LCEILF:
2635 case BUILT_IN_LLCEILF:
2636 name = "ceilf";
2637 break;
2638 case BUILT_IN_ICEILL:
2639 case BUILT_IN_LCEILL:
2640 case BUILT_IN_LLCEILL:
2641 name = "ceill";
2642 break;
2643 case BUILT_IN_IFLOOR:
2644 case BUILT_IN_LFLOOR:
2645 case BUILT_IN_LLFLOOR:
2646 name = "floor";
2647 break;
2648 case BUILT_IN_IFLOORF:
2649 case BUILT_IN_LFLOORF:
2650 case BUILT_IN_LLFLOORF:
2651 name = "floorf";
2652 break;
2653 case BUILT_IN_IFLOORL:
2654 case BUILT_IN_LFLOORL:
2655 case BUILT_IN_LLFLOORL:
2656 name = "floorl";
2657 break;
2658 default:
2659 gcc_unreachable ();
2662 fntype = build_function_type_list (TREE_TYPE (arg),
2663 TREE_TYPE (arg), NULL_TREE);
2664 fallback_fndecl = build_fn_decl (name, fntype);
2667 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2669 tmp = expand_normal (exp);
2670 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2672 /* Truncate the result of floating point optab to integer
2673 via expand_fix (). */
2674 target = gen_reg_rtx (mode);
2675 expand_fix (target, tmp, 0);
2677 return target;
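/* Illustrative effect of the fallback path above: on a target without an
   lfloor pattern, a call such as

     long l = __builtin_lfloor (x);

   is lowered to approximately

     long l = (long) floor (x);

   with the final conversion performed by expand_fix.  */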
2680 /* Expand a call to one of the builtin math functions doing integer
2681 conversion (lrint).
2682 Return 0 if a normal call should be emitted rather than expanding the
2683 function in-line. EXP is the expression that is a call to the builtin
2684 function; if convenient, the result should be placed in TARGET. */
2686 static rtx
2687 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2689 convert_optab builtin_optab;
2690 rtx op0;
2691 rtx_insn *insns;
2692 tree fndecl = get_callee_fndecl (exp);
2693 tree arg;
2694 machine_mode mode;
2695 enum built_in_function fallback_fn = BUILT_IN_NONE;
2697 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2698 gcc_unreachable ();
2700 arg = CALL_EXPR_ARG (exp, 0);
2702 switch (DECL_FUNCTION_CODE (fndecl))
2704 CASE_FLT_FN (BUILT_IN_IRINT):
2705 fallback_fn = BUILT_IN_LRINT;
2706 gcc_fallthrough ();
2707 CASE_FLT_FN (BUILT_IN_LRINT):
2708 CASE_FLT_FN (BUILT_IN_LLRINT):
2709 builtin_optab = lrint_optab;
2710 break;
2712 CASE_FLT_FN (BUILT_IN_IROUND):
2713 fallback_fn = BUILT_IN_LROUND;
2714 gcc_fallthrough ();
2715 CASE_FLT_FN (BUILT_IN_LROUND):
2716 CASE_FLT_FN (BUILT_IN_LLROUND):
2717 builtin_optab = lround_optab;
2718 break;
2720 default:
2721 gcc_unreachable ();
2724 /* There's no easy way to detect the case we need to set EDOM. */
2725 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2726 return NULL_RTX;
2728 /* Make a suitable register to place result in. */
2729 mode = TYPE_MODE (TREE_TYPE (exp));
2731 /* There's no easy way to detect the case we need to set EDOM. */
2732 if (!flag_errno_math)
2734 rtx result = gen_reg_rtx (mode);
2736 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2737 need to expand the argument again. This way, we will not perform
2738 side-effects more than once. */
2739 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2741 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2743 start_sequence ();
2745 if (expand_sfix_optab (result, op0, builtin_optab))
2747 /* Output the entire sequence. */
2748 insns = get_insns ();
2749 end_sequence ();
2750 emit_insn (insns);
2751 return result;
2754 /* If we were unable to expand via the builtin, stop the sequence
2755 (without outputting the insns) and call the library function
2756 with the stabilized argument list. */
2757 end_sequence ();
2760 if (fallback_fn != BUILT_IN_NONE)
2762 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2763 targets, (int) round (x) should never be transformed into
2764 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2765 a call to lround in the hope that the target provides at least some
2766 C99 functions. This should result in the best user experience for
2767 targets without full C99 support. */
2768 tree fallback_fndecl = mathfn_built_in_1
2769 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2771 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2772 fallback_fndecl, 1, arg);
2774 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2775 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2776 return convert_to_mode (mode, target, 0);
2779 return expand_call (exp, target, target == const0_rtx);
2782 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2783 a normal call should be emitted rather than expanding the function
2784 in-line. EXP is the expression that is a call to the builtin
2785 function; if convenient, the result should be placed in TARGET. */
2787 static rtx
2788 expand_builtin_powi (tree exp, rtx target)
2790 tree arg0, arg1;
2791 rtx op0, op1;
2792 machine_mode mode;
2793 machine_mode mode2;
2795 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2796 return NULL_RTX;
2798 arg0 = CALL_EXPR_ARG (exp, 0);
2799 arg1 = CALL_EXPR_ARG (exp, 1);
2800 mode = TYPE_MODE (TREE_TYPE (exp));
2802 /* Emit a libcall to libgcc. */
2804 /* Mode of the 2nd argument must match that of an int. */
2805 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2807 if (target == NULL_RTX)
2808 target = gen_reg_rtx (mode);
2810 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2811 if (GET_MODE (op0) != mode)
2812 op0 = convert_to_mode (mode, op0, 0);
2813 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2814 if (GET_MODE (op1) != mode2)
2815 op1 = convert_to_mode (mode2, op1, 0);
2817 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2818 target, LCT_CONST, mode,
2819 op0, mode, op1, mode2);
2821 return target;
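/* Sketch of the libcall emitted above: for double arguments,
   __builtin_powi (x, n) becomes roughly

     double r = __powidf2 (x, n);

   where __powidf2 is the usual libgcc name behind powi_optab for DFmode;
   the exact symbol comes from the optab's libfunc entry.  */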
2824 /* Expand expression EXP which is a call to the strlen builtin. Return
2825 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2826 try to get the result in TARGET, if convenient. */
2828 static rtx
2829 expand_builtin_strlen (tree exp, rtx target,
2830 machine_mode target_mode)
2832 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2833 return NULL_RTX;
2835 struct expand_operand ops[4];
2836 rtx pat;
2837 tree len;
2838 tree src = CALL_EXPR_ARG (exp, 0);
2839 rtx src_reg;
2840 rtx_insn *before_strlen;
2841 machine_mode insn_mode;
2842 enum insn_code icode = CODE_FOR_nothing;
2843 unsigned int align;
2845 /* If the length can be computed at compile-time, return it. */
2846 len = c_strlen (src, 0);
2847 if (len)
2848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2850 /* If the length can be computed at compile-time and is constant
2851 integer, but there are side-effects in src, evaluate
2852 src for side-effects, then return len.
2853 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2854 can be optimized into: i++; x = 3; */
2855 len = c_strlen (src, 1);
2856 if (len && TREE_CODE (len) == INTEGER_CST)
2858 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2859 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2862 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2864 /* If SRC is not a pointer type, don't do this operation inline. */
2865 if (align == 0)
2866 return NULL_RTX;
2868 /* Bail out if we can't compute strlen in the right mode. */
2869 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2871 icode = optab_handler (strlen_optab, insn_mode);
2872 if (icode != CODE_FOR_nothing)
2873 break;
2875 if (insn_mode == VOIDmode)
2876 return NULL_RTX;
2878 /* Make a place to hold the source address. We will not expand
2879 the actual source until we are sure that the expansion will
2880 not fail -- there are trees that cannot be expanded twice. */
2881 src_reg = gen_reg_rtx (Pmode);
2883 /* Mark the beginning of the strlen sequence so we can emit the
2884 source operand later. */
2885 before_strlen = get_last_insn ();
2887 create_output_operand (&ops[0], target, insn_mode);
2888 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2889 create_integer_operand (&ops[2], 0);
2890 create_integer_operand (&ops[3], align);
2891 if (!maybe_expand_insn (icode, 4, ops))
2892 return NULL_RTX;
2894 /* Check to see if the argument was declared attribute nonstring
2895 and if so, issue a warning since at this point it's not known
2896 to be nul-terminated. */
2897 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2899 /* Now that we are assured of success, expand the source. */
2900 start_sequence ();
2901 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2902 if (pat != src_reg)
2904 #ifdef POINTERS_EXTEND_UNSIGNED
2905 if (GET_MODE (pat) != Pmode)
2906 pat = convert_to_mode (Pmode, pat,
2907 POINTERS_EXTEND_UNSIGNED);
2908 #endif
2909 emit_move_insn (src_reg, pat);
2911 pat = get_insns ();
2912 end_sequence ();
2914 if (before_strlen)
2915 emit_insn_after (pat, before_strlen);
2916 else
2917 emit_insn_before (pat, get_insns ());
2919 /* Return the value in the proper mode for this function. */
2920 if (GET_MODE (ops[0].value) == target_mode)
2921 target = ops[0].value;
2922 else if (target != 0)
2923 convert_move (target, ops[0].value, 0);
2924 else
2925 target = convert_to_mode (target_mode, ops[0].value, 0);
2927 return target;
2930 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2931 bytes from constant string DATA + OFFSET and return it as target
2932 constant. */
2934 static rtx
2935 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2936 scalar_int_mode mode)
2938 const char *str = (const char *) data;
2940 gcc_assert (offset >= 0
2941 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2942 <= strlen (str) + 1));
2944 return c_readstr (str + offset, mode);
2947 /* LEN specifies the length of the block of the memcpy/memset operation.
2948 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2949 In some cases we can make a very likely guess at the maximum size, which
2950 we then set in PROBABLE_MAX_SIZE. */
2952 static void
2953 determine_block_size (tree len, rtx len_rtx,
2954 unsigned HOST_WIDE_INT *min_size,
2955 unsigned HOST_WIDE_INT *max_size,
2956 unsigned HOST_WIDE_INT *probable_max_size)
2958 if (CONST_INT_P (len_rtx))
2960 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2961 return;
2963 else
2965 wide_int min, max;
2966 enum value_range_type range_type = VR_UNDEFINED;
2968 /* Determine bounds from the type. */
2969 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2970 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2971 else
2972 *min_size = 0;
2973 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2974 *probable_max_size = *max_size
2975 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2976 else
2977 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2979 if (TREE_CODE (len) == SSA_NAME)
2980 range_type = get_range_info (len, &min, &max);
2981 if (range_type == VR_RANGE)
2983 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2984 *min_size = min.to_uhwi ();
2985 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2986 *probable_max_size = *max_size = max.to_uhwi ();
2988 else if (range_type == VR_ANTI_RANGE)
2990 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
2991 if (min == 0)
2993 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2994 *min_size = max.to_uhwi () + 1;
2996 /* Code like
2998 int n;
2999 if (n < 100)
3000 memcpy (a, b, n)
3002 produces an anti-range allowing negative values of N. We can still
3003 use that information to guess that N is not negative.
3005 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3006 *probable_max_size = min.to_uhwi () - 1;
3009 gcc_checking_assert (*max_size <=
3010 (unsigned HOST_WIDE_INT)
3011 GET_MODE_MASK (GET_MODE (len_rtx)));
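/* Worked example for the range logic above: for a guarded call such as

     if (n < 100)
       memcpy (dst, src, n);

   with unsigned n, value-range propagation may record n in [0, 99], giving
   MIN_SIZE 0 and MAX_SIZE/PROBABLE_MAX_SIZE 99; with no range information
   the bounds fall back to the limits of n's type.  */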
3014 /* Try to verify that the sizes and lengths of the arguments to a string
3015 manipulation function given by EXP are within valid bounds and that
3016 the operation does not lead to buffer overflow or read past the end.
3017 Arguments other than EXP may be null. When non-null, the arguments
3018 have the following meaning:
3019 DST is the destination of a copy call or NULL otherwise.
3020 SRC is the source of a copy call or NULL otherwise.
3021 DSTWRITE is the number of bytes written into the destination obtained
3022 from the user-supplied size argument to the function (such as in
3023 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3024 MAXREAD is the user-supplied bound on the length of the source sequence
3025 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3026 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3027 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3028 expression EXP is a string function call (as opposed to a memory call
3029 like memcpy). As an exception, SRCSTR can also be an integer denoting
3030 the precomputed size of the source string or object (for functions like
3031 memcpy).
3032 DSTSIZE is the size of the destination object specified by the last
3033 argument to the _chk builtins, typically resulting from the expansion
3034 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3035 DSTSIZE).
3037 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3038 SIZE_MAX.
3040 If the call is successfully verified as safe return true, otherwise
3041 return false. */
3043 static bool
3044 check_access (tree exp, tree, tree, tree dstwrite,
3045 tree maxread, tree srcstr, tree dstsize)
3047 int opt = OPT_Wstringop_overflow_;
3049 /* The size of the largest object is half the address space, or
3050 PTRDIFF_MAX. (This is way too permissive.) */
3051 tree maxobjsize = max_object_size ();
3053 /* Either the length of the source string for string functions or
3054 the size of the source object for raw memory functions. */
3055 tree slen = NULL_TREE;
3057 tree range[2] = { NULL_TREE, NULL_TREE };
3059 /* Set to true when the exact number of bytes written by a string
3060 function like strcpy is not known and the only thing that is
3061 known is that it must be at least one (for the terminating nul). */
3062 bool at_least_one = false;
3063 if (srcstr)
3065 /* SRCSTR is normally a pointer to string but as a special case
3066 it can be an integer denoting the length of a string. */
3067 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3069 /* Try to determine the range of lengths the source string
3070 refers to. If it can be determined and is less than
3071 the upper bound given by MAXREAD add one to it for
3072 the terminating nul. Otherwise, set it to one for
3073 the same reason, or to MAXREAD as appropriate. */
3074 get_range_strlen (srcstr, range);
3075 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3077 if (maxread && tree_int_cst_le (maxread, range[0]))
3078 range[0] = range[1] = maxread;
3079 else
3080 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3081 range[0], size_one_node);
3083 if (maxread && tree_int_cst_le (maxread, range[1]))
3084 range[1] = maxread;
3085 else if (!integer_all_onesp (range[1]))
3086 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3087 range[1], size_one_node);
3089 slen = range[0];
3091 else
3093 at_least_one = true;
3094 slen = size_one_node;
3097 else
3098 slen = srcstr;
3101 if (!dstwrite && !maxread)
3103 /* When the only available piece of data is the object size
3104 there is nothing to do. */
3105 if (!slen)
3106 return true;
3108 /* Otherwise, when the length of the source sequence is known
3109 (as with strlen), set DSTWRITE to it. */
3110 if (!range[0])
3111 dstwrite = slen;
3114 if (!dstsize)
3115 dstsize = maxobjsize;
3117 if (dstwrite)
3118 get_size_range (dstwrite, range);
3120 tree func = get_callee_fndecl (exp);
3122 /* First check the number of bytes to be written against the maximum
3123 object size. */
3124 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3126 location_t loc = tree_nonartificial_location (exp);
3127 loc = expansion_point_location_if_in_system_header (loc);
3129 if (range[0] == range[1])
3130 warning_at (loc, opt,
3131 "%K%qD specified size %E "
3132 "exceeds maximum object size %E",
3133 exp, func, range[0], maxobjsize);
3134 else
3135 warning_at (loc, opt,
3136 "%K%qD specified size between %E and %E "
3137 "exceeds maximum object size %E",
3138 exp, func,
3139 range[0], range[1], maxobjsize);
3140 return false;
3143 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3144 constant, and in range of unsigned HOST_WIDE_INT. */
3145 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3147 /* Next check the number of bytes to be written against the destination
3148 object size. */
3149 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3151 if (range[0]
3152 && ((tree_fits_uhwi_p (dstsize)
3153 && tree_int_cst_lt (dstsize, range[0]))
3154 || (tree_fits_uhwi_p (dstwrite)
3155 && tree_int_cst_lt (dstwrite, range[0]))))
3157 if (TREE_NO_WARNING (exp))
3158 return false;
3160 location_t loc = tree_nonartificial_location (exp);
3161 loc = expansion_point_location_if_in_system_header (loc);
3163 if (dstwrite == slen && at_least_one)
3165 /* This is a call to strcpy with a destination of 0 size
3166 and a source of unknown length. The call will write
3167 at least one byte past the end of the destination. */
3168 warning_at (loc, opt,
3169 "%K%qD writing %E or more bytes into a region "
3170 "of size %E overflows the destination",
3171 exp, func, range[0], dstsize);
3173 else if (tree_int_cst_equal (range[0], range[1]))
3174 warning_at (loc, opt,
3175 (integer_onep (range[0])
3176 ? G_("%K%qD writing %E byte into a region "
3177 "of size %E overflows the destination")
3178 : G_("%K%qD writing %E bytes into a region "
3179 "of size %E overflows the destination")),
3180 exp, func, range[0], dstsize);
3181 else if (tree_int_cst_sign_bit (range[1]))
3183 /* Avoid printing the upper bound if it's invalid. */
3184 warning_at (loc, opt,
3185 "%K%qD writing %E or more bytes into a region "
3186 "of size %E overflows the destination",
3187 exp, func, range[0], dstsize);
3189 else
3190 warning_at (loc, opt,
3191 "%K%qD writing between %E and %E bytes into "
3192 "a region of size %E overflows the destination",
3193 exp, func, range[0], range[1],
3194 dstsize);
3196 /* Return error when an overflow has been detected. */
3197 return false;
3201 /* Check the maximum length of the source sequence against the size
3202 of the destination object if known, or against the maximum size
3203 of an object. */
3204 if (maxread)
3206 get_size_range (maxread, range);
3208 /* Use the lower end for MAXREAD from now on. */
3209 if (range[0])
3210 maxread = range[0];
3212 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3214 location_t loc = tree_nonartificial_location (exp);
3215 loc = expansion_point_location_if_in_system_header (loc);
3217 if (tree_int_cst_lt (maxobjsize, range[0]))
3219 if (TREE_NO_WARNING (exp))
3220 return false;
3222 /* Warn about crazy big sizes first since that's more
3223 likely to be meaningful than saying that the bound
3224 is greater than the object size if both are big. */
3225 if (range[0] == range[1])
3226 warning_at (loc, opt,
3227 "%K%qD specified bound %E "
3228 "exceeds maximum object size %E",
3229 exp, func,
3230 range[0], maxobjsize);
3231 else
3232 warning_at (loc, opt,
3233 "%K%qD specified bound between %E and %E "
3234 "exceeds maximum object size %E",
3235 exp, func,
3236 range[0], range[1], maxobjsize);
3238 return false;
3241 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3243 if (TREE_NO_WARNING (exp))
3244 return false;
3246 if (tree_int_cst_equal (range[0], range[1]))
3247 warning_at (loc, opt,
3248 "%K%qD specified bound %E "
3249 "exceeds destination size %E",
3250 exp, func,
3251 range[0], dstsize);
3252 else
3253 warning_at (loc, opt,
3254 "%K%qD specified bound between %E and %E "
3255 "exceeds destination size %E",
3256 exp, func,
3257 range[0], range[1], dstsize);
3258 return false;
3263 /* Check for reading past the end of SRC. */
3264 if (slen
3265 && slen == srcstr
3266 && dstwrite && range[0]
3267 && tree_int_cst_lt (slen, range[0]))
3269 if (TREE_NO_WARNING (exp))
3270 return false;
3272 location_t loc = tree_nonartificial_location (exp);
3274 if (tree_int_cst_equal (range[0], range[1]))
3275 warning_at (loc, opt,
3276 (tree_int_cst_equal (range[0], integer_one_node)
3277 ? G_("%K%qD reading %E byte from a region of size %E")
3278 : G_("%K%qD reading %E bytes from a region of size %E")),
3279 exp, func, range[0], slen);
3280 else if (tree_int_cst_sign_bit (range[1]))
3282 /* Avoid printing the upper bound if it's invalid. */
3283 warning_at (loc, opt,
3284 "%K%qD reading %E or more bytes from a region "
3285 "of size %E",
3286 exp, func, range[0], slen);
3288 else
3289 warning_at (loc, opt,
3290 "%K%qD reading between %E and %E bytes from a region "
3291 "of size %E",
3292 exp, func, range[0], range[1], slen);
3293 return false;
3296 return true;
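/* Diagnostic sketch: with char d[3], a call strcpy (d, "abcd") reaches this
   routine with DSTSIZE 3 and a source length range of [5, 5] (four
   characters plus the terminating nul), so the "writing 5 bytes into a
   region of size 3" form of the warning above fires and false is
   returned.  */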
3299 /* Helper to compute the size of the object referenced by the DEST
3300 expression which must have pointer type, using Object Size type
3301 OSTYPE (only the least significant 2 bits are used). Return
3302 an estimate of the size of the object if successful or NULL when
3303 the size cannot be determined. When the referenced object involves
3304 a non-constant offset in some range the returned value represents
3305 the largest size given the smallest non-negative offset in the
3306 range. The function is intended for diagnostics and should not
3307 be used to influence code generation or optimization. */
3309 tree
3310 compute_objsize (tree dest, int ostype)
3312 unsigned HOST_WIDE_INT size;
3314 /* Only the two least significant bits are meaningful. */
3315 ostype &= 3;
3317 if (compute_builtin_object_size (dest, ostype, &size))
3318 return build_int_cst (sizetype, size);
3320 if (TREE_CODE (dest) == SSA_NAME)
3322 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3323 if (!is_gimple_assign (stmt))
3324 return NULL_TREE;
3326 dest = gimple_assign_rhs1 (stmt);
3328 tree_code code = gimple_assign_rhs_code (stmt);
3329 if (code == POINTER_PLUS_EXPR)
3331 /* compute_builtin_object_size fails for addresses with
3332 non-constant offsets. Try to determine the range of
3333 such an offset here and use it to adjust the constant
3334 size. */
3335 tree off = gimple_assign_rhs2 (stmt);
3336 if (TREE_CODE (off) == SSA_NAME
3337 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3339 wide_int min, max;
3340 enum value_range_type rng = get_range_info (off, &min, &max);
3342 if (rng == VR_RANGE)
3344 if (tree size = compute_objsize (dest, ostype))
3346 wide_int wisiz = wi::to_wide (size);
3348 /* Ignore negative offsets for now. For others,
3349 use the lower bound as the most optimistic
3350 estimate of the (remaining) size. */
3351 if (wi::sign_mask (min))
3353 else if (wi::ltu_p (min, wisiz))
3354 return wide_int_to_tree (TREE_TYPE (size),
3355 wi::sub (wisiz, min));
3356 else
3357 return size_zero_node;
3362 else if (code != ADDR_EXPR)
3363 return NULL_TREE;
3366 /* Unless computing the largest size (for memcpy and other raw memory
3367 functions), try to determine the size of the object from its type. */
3368 if (!ostype)
3369 return NULL_TREE;
3371 if (TREE_CODE (dest) != ADDR_EXPR)
3372 return NULL_TREE;
3374 tree type = TREE_TYPE (dest);
3375 if (TREE_CODE (type) == POINTER_TYPE)
3376 type = TREE_TYPE (type);
3378 type = TYPE_MAIN_VARIANT (type);
3380 if (TREE_CODE (type) == ARRAY_TYPE
3381 && !array_at_struct_end_p (dest))
3383 /* Return the constant size unless it's zero (that's a zero-length
3384 array likely at the end of a struct). */
3385 tree size = TYPE_SIZE_UNIT (type);
3386 if (size && TREE_CODE (size) == INTEGER_CST
3387 && !integer_zerop (size))
3388 return size;
3391 return NULL_TREE;
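/* Illustration of the POINTER_PLUS_EXPR handling above: given

     char buf[8];
     char *p = buf + i;

   where i is an SSA name whose recorded range is [2, 5], the recursion
   computes the base size 8, subtracts the lower bound 2 of the offset and
   returns 6, the most optimistic estimate of the remaining size.  */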
3394 /* Helper to determine and check the sizes of the source and the destination
3395 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3396 call expression, DEST is the destination argument, SRC is the source
3397 argument or null, and LEN is the number of bytes. Use Object Size type-0
3398 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3399 (no overflow or invalid sizes), false otherwise. */
3401 static bool
3402 check_memop_access (tree exp, tree dest, tree src, tree size)
3404 /* For functions like memset and memcpy that operate on raw memory
3405 try to determine the size of the largest source and destination
3406 object using type-0 Object Size regardless of the object size
3407 type specified by the option. */
3408 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3409 tree dstsize = compute_objsize (dest, 0);
3411 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3412 srcsize, dstsize);
3415 /* Validate memchr arguments without performing any expansion.
3416 Return NULL_RTX. */
3418 static rtx
3419 expand_builtin_memchr (tree exp, rtx)
3421 if (!validate_arglist (exp,
3422 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3423 return NULL_RTX;
3425 tree arg1 = CALL_EXPR_ARG (exp, 0);
3426 tree len = CALL_EXPR_ARG (exp, 2);
3428 /* Diagnose calls where the specified length exceeds the size
3429 of the object. */
3430 if (warn_stringop_overflow)
3432 tree size = compute_objsize (arg1, 0);
3433 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3434 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3437 return NULL_RTX;
3440 /* Expand a call EXP to the memcpy builtin.
3441 Return NULL_RTX if we failed; the caller should emit a normal call,
3442 otherwise try to get the result in TARGET, if convenient (and in
3443 mode MODE if that's convenient). */
3445 static rtx
3446 expand_builtin_memcpy (tree exp, rtx target)
3448 if (!validate_arglist (exp,
3449 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3450 return NULL_RTX;
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
3456 check_memop_access (exp, dest, src, len);
3458 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3459 /*endp=*/ 0);
3462 /* Check a call EXP to the memmove built-in for validity.
3463 Return NULL_RTX on both success and failure. */
3465 static rtx
3466 expand_builtin_memmove (tree exp, rtx)
3468 if (!validate_arglist (exp,
3469 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3470 return NULL_RTX;
3472 tree dest = CALL_EXPR_ARG (exp, 0);
3473 tree src = CALL_EXPR_ARG (exp, 1);
3474 tree len = CALL_EXPR_ARG (exp, 2);
3476 check_memop_access (exp, dest, src, len);
3478 return NULL_RTX;
3481 /* Expand an instrumented call EXP to the memcpy builtin.
3482 Return NULL_RTX if we failed; the caller should emit a normal call,
3483 otherwise try to get the result in TARGET, if convenient (and in
3484 mode MODE if that's convenient). */
3486 static rtx
3487 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3489 if (!validate_arglist (exp,
3490 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3491 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3492 INTEGER_TYPE, VOID_TYPE))
3493 return NULL_RTX;
3494 else
3496 tree dest = CALL_EXPR_ARG (exp, 0);
3497 tree src = CALL_EXPR_ARG (exp, 2);
3498 tree len = CALL_EXPR_ARG (exp, 4);
3499 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3500 /*end_p=*/ 0);
3502 /* Return src bounds with the result. */
3503 if (res)
3505 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3506 expand_normal (CALL_EXPR_ARG (exp, 1)));
3507 res = chkp_join_splitted_slot (res, bnd);
3509 return res;
3513 /* Expand a call EXP to the mempcpy builtin.
3514 Return NULL_RTX if we failed; the caller should emit a normal call,
3515 otherwise try to get the result in TARGET, if convenient (and in
3516 mode MODE if that's convenient). If ENDP is 0 return the
3517 destination pointer, if ENDP is 1 return the end pointer ala
3518 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3519 stpcpy. */
3521 static rtx
3522 expand_builtin_mempcpy (tree exp, rtx target)
3524 if (!validate_arglist (exp,
3525 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3528 tree dest = CALL_EXPR_ARG (exp, 0);
3529 tree src = CALL_EXPR_ARG (exp, 1);
3530 tree len = CALL_EXPR_ARG (exp, 2);
3532 /* Policy does not generally allow using compute_objsize (which
3533 is used internally by check_memop_access) to change code generation
3534 or drive optimization decisions.
3536 In this instance it is safe because the code we generate has
3537 the same semantics regardless of the return value of
3538 check_memop_access. Exactly the same amount of data is copied
3539 and the return value is exactly the same in both cases.
3541 Furthermore, check_memop_access always uses mode 0 for the call to
3542 compute_objsize, so the imprecise nature of compute_objsize is
3543 avoided. */
3545 /* Avoid expanding mempcpy into memcpy when the call is determined
3546 to overflow the buffer. This also prevents the same overflow
3547 from being diagnosed again when expanding memcpy. */
3548 if (!check_memop_access (exp, dest, src, len))
3549 return NULL_RTX;
3551 return expand_builtin_mempcpy_args (dest, src, len,
3552 target, exp, /*endp=*/ 1);
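/* For reference, a sketch of the ENDP convention shared by these
   expanders (assuming an 8-byte buffer BUF):
     __builtin_memcpy (buf, "ab", 3)    returns BUF        (ENDP == 0)
     __builtin_mempcpy (buf, "ab", 3)   returns BUF + 3    (ENDP == 1)
     __builtin_stpcpy (buf, "ab")       returns BUF + 2    (ENDP == 2,
                                        i.e. a pointer to the NUL).  */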
3555 /* Expand an instrumented call EXP to the mempcpy builtin.
3556 Return NULL_RTX if we failed; the caller should emit a normal call,
3557 otherwise try to get the result in TARGET, if convenient (and in
3558 mode MODE if that's convenient). */
3560 static rtx
3561 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3563 if (!validate_arglist (exp,
3564 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3565 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3566 INTEGER_TYPE, VOID_TYPE))
3567 return NULL_RTX;
3568 else
3570 tree dest = CALL_EXPR_ARG (exp, 0);
3571 tree src = CALL_EXPR_ARG (exp, 2);
3572 tree len = CALL_EXPR_ARG (exp, 4);
3573 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3574 exp, 1);
3576 /* Return src bounds with the result. */
3577 if (res)
3579 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3580 expand_normal (CALL_EXPR_ARG (exp, 1)));
3581 res = chkp_join_splitted_slot (res, bnd);
3583 return res;
3587 /* Helper function to do the actual work for expand of memory copy family
3588 functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN bytes
3589 of memory from SRC to DEST and assign the result to TARGET if convenient.
3590 If ENDP is 0 return the
3591 destination pointer, if ENDP is 1 return the end pointer ala
3592 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3593 stpcpy. */
3595 static rtx
3596 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3597 rtx target, tree exp, int endp)
3599 const char *src_str;
3600 unsigned int src_align = get_pointer_alignment (src);
3601 unsigned int dest_align = get_pointer_alignment (dest);
3602 rtx dest_mem, src_mem, dest_addr, len_rtx;
3603 HOST_WIDE_INT expected_size = -1;
3604 unsigned int expected_align = 0;
3605 unsigned HOST_WIDE_INT min_size;
3606 unsigned HOST_WIDE_INT max_size;
3607 unsigned HOST_WIDE_INT probable_max_size;
3609 /* If DEST is not a pointer type, call the normal function. */
3610 if (dest_align == 0)
3611 return NULL_RTX;
3613 /* If SRC is not a pointer type, don't do this
3614 operation in-line. */
3615 if (src_align == 0)
3616 return NULL_RTX;
3618 if (currently_expanding_gimple_stmt)
3619 stringop_block_profile (currently_expanding_gimple_stmt,
3620 &expected_align, &expected_size);
3622 if (expected_align < dest_align)
3623 expected_align = dest_align;
3624 dest_mem = get_memory_rtx (dest, len);
3625 set_mem_align (dest_mem, dest_align);
3626 len_rtx = expand_normal (len);
3627 determine_block_size (len, len_rtx, &min_size, &max_size,
3628 &probable_max_size);
3629 src_str = c_getstr (src);
3631 /* If SRC is a string constant and block move would be done
3632 by pieces, we can avoid loading the string from memory
3633 and only store the computed constants. */
3634 if (src_str
3635 && CONST_INT_P (len_rtx)
3636 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3637 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3638 CONST_CAST (char *, src_str),
3639 dest_align, false))
3641 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3642 builtin_memcpy_read_str,
3643 CONST_CAST (char *, src_str),
3644 dest_align, false, endp);
3645 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3646 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3647 return dest_mem;
3650 src_mem = get_memory_rtx (src, len);
3651 set_mem_align (src_mem, src_align);
3653 /* Copy word part most expediently. */
3654 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3655 CALL_EXPR_TAILCALL (exp)
3656 && (endp == 0 || target == const0_rtx)
3657 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3658 expected_align, expected_size,
3659 min_size, max_size, probable_max_size);
3661 if (dest_addr == 0)
3663 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3664 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3667 if (endp && target != const0_rtx)
3669 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3670 /* For stpcpy, return a pointer to the last byte. */
3671 if (endp == 2)
3672 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3675 return dest_addr;
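/* For example, in the function above a call such as
     char d[8];
     __builtin_memcpy (d, "hi", 3);
   has a string-constant source and a constant length, so when
   can_store_by_pieces agrees, the three bytes (including the NUL) are
   emitted as immediate stores instead of being loaded from the
   string's memory copy.  This is only a sketch of the intent; the
   actual sequence is target dependent.  */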
3678 static rtx
3679 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3680 rtx target, tree orig_exp, int endp)
3682 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3683 endp);
3686 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3687 we failed; the caller should emit a normal call, otherwise try to
3688 get the result in TARGET, if convenient. If ENDP is 0 return the
3689 destination pointer, if ENDP is 1 return the end pointer ala
3690 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3691 stpcpy. */
3693 static rtx
3694 expand_movstr (tree dest, tree src, rtx target, int endp)
3696 struct expand_operand ops[3];
3697 rtx dest_mem;
3698 rtx src_mem;
3700 if (!targetm.have_movstr ())
3701 return NULL_RTX;
3703 dest_mem = get_memory_rtx (dest, NULL);
3704 src_mem = get_memory_rtx (src, NULL);
3705 if (!endp)
3707 target = force_reg (Pmode, XEXP (dest_mem, 0));
3708 dest_mem = replace_equiv_address (dest_mem, target);
3711 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3712 create_fixed_operand (&ops[1], dest_mem);
3713 create_fixed_operand (&ops[2], src_mem);
3714 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3715 return NULL_RTX;
3717 if (endp && target != const0_rtx)
3719 target = ops[0].value;
3720 /* movstr is supposed to set end to the address of the NUL
3721 terminator. If the caller requested a mempcpy-like return value,
3722 adjust it. */
3723 if (endp == 1)
3725 rtx tem = plus_constant (GET_MODE (target),
3726 gen_lowpart (GET_MODE (target), target), 1);
3727 emit_move_insn (target, force_operand (tem, NULL_RTX));
3730 return target;
3733 /* Do some very basic size validation of a call to the strcat builtin
3734 given by EXP. Return NULL_RTX to have the built-in expand to a call
3735 to the library function. */
3737 static rtx
3738 expand_builtin_strcat (tree exp, rtx)
3740 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3741 || !warn_stringop_overflow)
3742 return NULL_RTX;
3744 tree dest = CALL_EXPR_ARG (exp, 0);
3745 tree src = CALL_EXPR_ARG (exp, 1);
3747 /* There is no way here to determine the length of the string in
3748 the destination to which the SRC string is being appended, so
3749 just diagnose cases when the source string is longer than
3750 the destination object. */
3752 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3754 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3755 destsize);
3757 return NULL_RTX;
3760 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3761 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3762 try to get the result in TARGET, if convenient (and in mode MODE if that's
3763 convenient). */
3765 static rtx
3766 expand_builtin_strcpy (tree exp, rtx target)
3768 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3769 return NULL_RTX;
3771 tree dest = CALL_EXPR_ARG (exp, 0);
3772 tree src = CALL_EXPR_ARG (exp, 1);
3774 if (warn_stringop_overflow)
3776 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3777 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3778 src, destsize);
3781 return expand_builtin_strcpy_args (dest, src, target);
3784 /* Helper function to do the actual work for expand_builtin_strcpy. The
3785 arguments to the builtin_strcpy call DEST and SRC are broken out
3786 so that this can also be called without constructing an actual CALL_EXPR.
3787 The other arguments and return value are the same as for
3788 expand_builtin_strcpy. */
3790 static rtx
3791 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3793 return expand_movstr (dest, src, target, /*endp=*/0);
3796 /* Expand a call EXP to the stpcpy builtin.
3797 Return NULL_RTX if we failed; the caller should emit a normal call,
3798 otherwise try to get the result in TARGET, if convenient (and in
3799 mode MODE if that's convenient). */
3801 static rtx
3802 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3804 tree dst, src;
3805 location_t loc = EXPR_LOCATION (exp);
3807 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3808 return NULL_RTX;
3810 dst = CALL_EXPR_ARG (exp, 0);
3811 src = CALL_EXPR_ARG (exp, 1);
3813 if (warn_stringop_overflow)
3815 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3816 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3817 src, destsize);
3820 /* If return value is ignored, transform stpcpy into strcpy. */
3821 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3823 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3824 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3825 return expand_expr (result, target, mode, EXPAND_NORMAL);
3827 else
3829 tree len, lenp1;
3830 rtx ret;
3832 /* Ensure we get an actual string whose length can be evaluated at
3833 compile-time, not an expression containing a string. This is
3834 because the latter will potentially produce pessimized code
3835 when used to produce the return value. */
3836 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3837 return expand_movstr (dst, src, target, /*endp=*/2);
3839 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3840 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3841 target, exp, /*endp=*/2);
3843 if (ret)
3844 return ret;
3846 if (TREE_CODE (len) == INTEGER_CST)
3848 rtx len_rtx = expand_normal (len);
3850 if (CONST_INT_P (len_rtx))
3852 ret = expand_builtin_strcpy_args (dst, src, target);
3854 if (ret)
3856 if (! target)
3858 if (mode != VOIDmode)
3859 target = gen_reg_rtx (mode);
3860 else
3861 target = gen_reg_rtx (GET_MODE (ret));
3863 if (GET_MODE (target) != GET_MODE (ret))
3864 ret = gen_lowpart (GET_MODE (target), ret);
3866 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3867 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3868 gcc_assert (ret);
3870 return target;
3875 return expand_movstr (dst, src, target, /*endp=*/2);
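/* Sketch of the constant-length path above: for
     char d[8];
     char *p = __builtin_stpcpy (d, "ab");
   c_strlen gives LEN == 2, LENP1 == 3, and the call is expanded as a
   mempcpy of 3 bytes with ENDP == 2, so P becomes D + 2, pointing at
   the copied terminating NUL.  */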
3879 /* Check a call EXP to the stpncpy built-in for validity.
3880 Return NULL_RTX on both success and failure. */
3882 static rtx
3883 expand_builtin_stpncpy (tree exp, rtx)
3885 if (!validate_arglist (exp,
3886 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3887 || !warn_stringop_overflow)
3888 return NULL_RTX;
3890 /* The source and destination of the call. */
3891 tree dest = CALL_EXPR_ARG (exp, 0);
3892 tree src = CALL_EXPR_ARG (exp, 1);
3894 /* The exact number of bytes to write (not the maximum). */
3895 tree len = CALL_EXPR_ARG (exp, 2);
3897 /* The size of the destination object. */
3898 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3900 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3902 return NULL_RTX;
3905 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3906 bytes from constant string DATA + OFFSET and return it as a target
3907 constant. */
3909 static rtx
3910 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3911 scalar_int_mode mode)
3913 const char *str = (const char *) data;
3915 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3916 return const0_rtx;
3918 return c_readstr (str + offset, mode);
3921 /* Helper to check the sizes of sequences and the destination of calls
3922 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3923 success (no overflow or invalid sizes), false otherwise. */
3925 static bool
3926 check_strncat_sizes (tree exp, tree objsize)
3928 tree dest = CALL_EXPR_ARG (exp, 0);
3929 tree src = CALL_EXPR_ARG (exp, 1);
3930 tree maxread = CALL_EXPR_ARG (exp, 2);
3932 /* Try to determine the range of lengths that the source expression
3933 refers to. */
3934 tree lenrange[2];
3935 get_range_strlen (src, lenrange);
3937 /* Try to verify that the destination is big enough for the shortest
3938 string. */
3940 if (!objsize && warn_stringop_overflow)
3942 /* If it hasn't been provided by __strncat_chk, try to determine
3943 the size of the destination object into which the source is
3944 being copied. */
3945 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3948 /* Add one for the terminating nul. */
3949 tree srclen = (lenrange[0]
3950 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3951 size_one_node)
3952 : NULL_TREE);
3954 /* The strncat function copies at most MAXREAD bytes and always appends
3955 the terminating nul so the specified upper bound should never be equal
3956 to (or greater than) the size of the destination. */
3957 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3958 && tree_int_cst_equal (objsize, maxread))
3960 location_t loc = tree_nonartificial_location (exp);
3961 loc = expansion_point_location_if_in_system_header (loc);
3963 warning_at (loc, OPT_Wstringop_overflow_,
3964 "%K%qD specified bound %E equals destination size",
3965 exp, get_callee_fndecl (exp), maxread);
3967 return false;
3970 if (!srclen
3971 || (maxread && tree_fits_uhwi_p (maxread)
3972 && tree_fits_uhwi_p (srclen)
3973 && tree_int_cst_lt (maxread, srclen)))
3974 srclen = maxread;
3976 /* The number of bytes to write is LEN but check_access will also
3977 check SRCLEN if LEN's value isn't known. */
3978 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3979 objsize);
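/* As an illustration of the bound check above, a call like
     char d[4];
     __builtin_strncat (d, s, sizeof d);
   is diagnosed: strncat always appends a terminating NUL, so a bound
   equal to the destination size can write one byte past the end of D
   even when S fits.  */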
3982 /* Similar to expand_builtin_strcat, do some very basic size validation
3983 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3984 the built-in expand to a call to the library function. */
3986 static rtx
3987 expand_builtin_strncat (tree exp, rtx)
3989 if (!validate_arglist (exp,
3990 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3991 || !warn_stringop_overflow)
3992 return NULL_RTX;
3994 tree dest = CALL_EXPR_ARG (exp, 0);
3995 tree src = CALL_EXPR_ARG (exp, 1);
3996 /* The upper bound on the number of bytes to write. */
3997 tree maxread = CALL_EXPR_ARG (exp, 2);
3998 /* The length of the source sequence. */
3999 tree slen = c_strlen (src, 1);
4001 /* Try to determine the range of lengths that the source expression
4002 refers to. */
4003 tree lenrange[2];
4004 if (slen)
4005 lenrange[0] = lenrange[1] = slen;
4006 else
4007 get_range_strlen (src, lenrange);
4009 /* Try to verify that the destination is big enough for the shortest
4010 string. First try to determine the size of the destination object
4011 into which the source is being copied. */
4012 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4014 /* Add one for the terminating nul. */
4015 tree srclen = (lenrange[0]
4016 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4017 size_one_node)
4018 : NULL_TREE);
4020 /* The strncat function copies at most MAXREAD bytes and always appends
4021 the terminating nul so the specified upper bound should never be equal
4022 to (or greater than) the size of the destination. */
4023 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4024 && tree_int_cst_equal (destsize, maxread))
4026 location_t loc = tree_nonartificial_location (exp);
4027 loc = expansion_point_location_if_in_system_header (loc);
4029 warning_at (loc, OPT_Wstringop_overflow_,
4030 "%K%qD specified bound %E equals destination size",
4031 exp, get_callee_fndecl (exp), maxread);
4033 return NULL_RTX;
4036 if (!srclen
4037 || (maxread && tree_fits_uhwi_p (maxread)
4038 && tree_fits_uhwi_p (srclen)
4039 && tree_int_cst_lt (maxread, srclen)))
4040 srclen = maxread;
4042 /* The number of bytes to write is SRCLEN. */
4043 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4045 return NULL_RTX;
4048 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4049 NULL_RTX if we failed; the caller should emit a normal call. */
4051 static rtx
4052 expand_builtin_strncpy (tree exp, rtx target)
4054 location_t loc = EXPR_LOCATION (exp);
4056 if (validate_arglist (exp,
4057 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4059 tree dest = CALL_EXPR_ARG (exp, 0);
4060 tree src = CALL_EXPR_ARG (exp, 1);
4061 /* The number of bytes to write (not the maximum). */
4062 tree len = CALL_EXPR_ARG (exp, 2);
4063 /* The length of the source sequence. */
4064 tree slen = c_strlen (src, 1);
4066 if (warn_stringop_overflow)
4068 tree destsize = compute_objsize (dest,
4069 warn_stringop_overflow - 1);
4071 /* The number of bytes to write is LEN but check_access will also
4072 check SLEN if LEN's value isn't known. */
4073 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4074 destsize);
4077 /* We must be passed constant LEN and SRC parameters. */
4078 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4079 return NULL_RTX;
4081 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4083 /* We're required to pad with trailing zeros if the requested
4084 len is greater than strlen(s2)+1. In that case try to
4085 use store_by_pieces; if it fails, punt. */
4086 if (tree_int_cst_lt (slen, len))
4088 unsigned int dest_align = get_pointer_alignment (dest);
4089 const char *p = c_getstr (src);
4090 rtx dest_mem;
4092 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4093 || !can_store_by_pieces (tree_to_uhwi (len),
4094 builtin_strncpy_read_str,
4095 CONST_CAST (char *, p),
4096 dest_align, false))
4097 return NULL_RTX;
4099 dest_mem = get_memory_rtx (dest, len);
4100 store_by_pieces (dest_mem, tree_to_uhwi (len),
4101 builtin_strncpy_read_str,
4102 CONST_CAST (char *, p), dest_align, false, 0);
4103 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4104 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4105 return dest_mem;
4108 return NULL_RTX;
4111 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4112 bytes from constant string DATA + OFFSET and return it as a target
4113 constant. */
4115 static rtx
4116 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4117 scalar_int_mode mode)
4119 const char *c = (const char *) data;
4120 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4122 memset (p, *c, GET_MODE_SIZE (mode));
4124 return c_readstr (p, mode);
4127 /* Callback routine for store_by_pieces. Return the RTL of a register
4128 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4129 char value given in the RTL register data. For example, if mode is
4130 4 bytes wide, return the RTL for 0x01010101*data. */
4132 static rtx
4133 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4134 scalar_int_mode mode)
4136 rtx target, coeff;
4137 size_t size;
4138 char *p;
4140 size = GET_MODE_SIZE (mode);
4141 if (size == 1)
4142 return (rtx) data;
4144 p = XALLOCAVEC (char, size);
4145 memset (p, 1, size);
4146 coeff = c_readstr (p, mode);
4148 target = convert_to_mode (mode, (rtx) data, 1);
4149 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4150 return force_reg (mode, target);
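/* Worked example for the helper above, assuming a 4-byte mode and a
   fill value of 0xab in DATA: COEFF is read from "\1\1\1\1" as
   0x01010101, and 0xab * 0x01010101 == 0xabababab, i.e. the byte
   replicated into every position of the word.  */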
4153 /* Expand expression EXP, which is a call to the memset builtin. Return
4154 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4155 try to get the result in TARGET, if convenient (and in mode MODE if that's
4156 convenient). */
4158 static rtx
4159 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4161 if (!validate_arglist (exp,
4162 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4163 return NULL_RTX;
4165 tree dest = CALL_EXPR_ARG (exp, 0);
4166 tree val = CALL_EXPR_ARG (exp, 1);
4167 tree len = CALL_EXPR_ARG (exp, 2);
4169 check_memop_access (exp, dest, NULL_TREE, len);
4171 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4174 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4175 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4176 try to get the result in TARGET, if convenient (and in mode MODE if that's
4177 convenient). */
4179 static rtx
4180 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4182 if (!validate_arglist (exp,
4183 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4184 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4185 return NULL_RTX;
4186 else
4188 tree dest = CALL_EXPR_ARG (exp, 0);
4189 tree val = CALL_EXPR_ARG (exp, 2);
4190 tree len = CALL_EXPR_ARG (exp, 3);
4191 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4193 /* Return src bounds with the result. */
4194 if (res)
4196 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4197 expand_normal (CALL_EXPR_ARG (exp, 1)));
4198 res = chkp_join_splitted_slot (res, bnd);
4200 return res;
4204 /* Helper function to do the actual work for expand_builtin_memset. The
4205 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4206 so that this can also be called without constructing an actual CALL_EXPR.
4207 The other arguments and return value are the same as for
4208 expand_builtin_memset. */
4210 static rtx
4211 expand_builtin_memset_args (tree dest, tree val, tree len,
4212 rtx target, machine_mode mode, tree orig_exp)
4214 tree fndecl, fn;
4215 enum built_in_function fcode;
4216 machine_mode val_mode;
4217 char c;
4218 unsigned int dest_align;
4219 rtx dest_mem, dest_addr, len_rtx;
4220 HOST_WIDE_INT expected_size = -1;
4221 unsigned int expected_align = 0;
4222 unsigned HOST_WIDE_INT min_size;
4223 unsigned HOST_WIDE_INT max_size;
4224 unsigned HOST_WIDE_INT probable_max_size;
4226 dest_align = get_pointer_alignment (dest);
4228 /* If DEST is not a pointer type, don't do this operation in-line. */
4229 if (dest_align == 0)
4230 return NULL_RTX;
4232 if (currently_expanding_gimple_stmt)
4233 stringop_block_profile (currently_expanding_gimple_stmt,
4234 &expected_align, &expected_size);
4236 if (expected_align < dest_align)
4237 expected_align = dest_align;
4239 /* If the LEN parameter is zero, return DEST. */
4240 if (integer_zerop (len))
4242 /* Evaluate and ignore VAL in case it has side-effects. */
4243 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4244 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4247 /* Stabilize the arguments in case we fail. */
4248 dest = builtin_save_expr (dest);
4249 val = builtin_save_expr (val);
4250 len = builtin_save_expr (len);
4252 len_rtx = expand_normal (len);
4253 determine_block_size (len, len_rtx, &min_size, &max_size,
4254 &probable_max_size);
4255 dest_mem = get_memory_rtx (dest, len);
4256 val_mode = TYPE_MODE (unsigned_char_type_node);
4258 if (TREE_CODE (val) != INTEGER_CST)
4260 rtx val_rtx;
4262 val_rtx = expand_normal (val);
4263 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4265 /* Assume that we can memset by pieces if we can store
4266 the coefficients by pieces (in the required modes).
4267 We can't pass builtin_memset_gen_str as that emits RTL. */
4268 c = 1;
4269 if (tree_fits_uhwi_p (len)
4270 && can_store_by_pieces (tree_to_uhwi (len),
4271 builtin_memset_read_str, &c, dest_align,
4272 true))
4274 val_rtx = force_reg (val_mode, val_rtx);
4275 store_by_pieces (dest_mem, tree_to_uhwi (len),
4276 builtin_memset_gen_str, val_rtx, dest_align,
4277 true, 0);
4279 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4280 dest_align, expected_align,
4281 expected_size, min_size, max_size,
4282 probable_max_size))
4283 goto do_libcall;
4285 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4286 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4287 return dest_mem;
4290 if (target_char_cast (val, &c))
4291 goto do_libcall;
4293 if (c)
4295 if (tree_fits_uhwi_p (len)
4296 && can_store_by_pieces (tree_to_uhwi (len),
4297 builtin_memset_read_str, &c, dest_align,
4298 true))
4299 store_by_pieces (dest_mem, tree_to_uhwi (len),
4300 builtin_memset_read_str, &c, dest_align, true, 0);
4301 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4302 gen_int_mode (c, val_mode),
4303 dest_align, expected_align,
4304 expected_size, min_size, max_size,
4305 probable_max_size))
4306 goto do_libcall;
4308 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4309 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4310 return dest_mem;
4313 set_mem_align (dest_mem, dest_align);
4314 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4315 CALL_EXPR_TAILCALL (orig_exp)
4316 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4317 expected_align, expected_size,
4318 min_size, max_size,
4319 probable_max_size);
4321 if (dest_addr == 0)
4323 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4324 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4327 return dest_addr;
4329 do_libcall:
4330 fndecl = get_callee_fndecl (orig_exp);
4331 fcode = DECL_FUNCTION_CODE (fndecl);
4332 if (fcode == BUILT_IN_MEMSET
4333 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4334 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4335 dest, val, len);
4336 else if (fcode == BUILT_IN_BZERO)
4337 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4338 dest, len);
4339 else
4340 gcc_unreachable ();
4341 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4342 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4343 return expand_call (fn, target, target == const0_rtx);
4346 /* Expand expression EXP, which is a call to the bzero builtin. Return
4347 NULL_RTX if we failed; the caller should emit a normal call. */
4349 static rtx
4350 expand_builtin_bzero (tree exp)
4352 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4353 return NULL_RTX;
4355 tree dest = CALL_EXPR_ARG (exp, 0);
4356 tree size = CALL_EXPR_ARG (exp, 1);
4358 check_memop_access (exp, dest, NULL_TREE, size);
4360 /* New argument list transforming bzero(ptr x, int y) to
4361 memset(ptr x, int 0, size_t y). This is done this way
4362 so that if it isn't expanded inline, we fall back to
4363 calling bzero instead of memset. */
4365 location_t loc = EXPR_LOCATION (exp);
4367 return expand_builtin_memset_args (dest, integer_zero_node,
4368 fold_convert_loc (loc,
4369 size_type_node, size),
4370 const0_rtx, VOIDmode, exp);
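/* In other words (a sketch of the rewrite above), a call such as
     __builtin_bzero (p, n);
   is expanded through the memset machinery as if it had been written
     __builtin_memset (p, 0, (size_t) n);
   while ORIG_EXP is kept so that a library fallback still calls bzero.  */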
4373 /* Try to expand cmpstr operation ICODE with the given operands.
4374 Return the result rtx on success, otherwise return null. */
4376 static rtx
4377 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4378 HOST_WIDE_INT align)
4380 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4382 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4383 target = NULL_RTX;
4385 struct expand_operand ops[4];
4386 create_output_operand (&ops[0], target, insn_mode);
4387 create_fixed_operand (&ops[1], arg1_rtx);
4388 create_fixed_operand (&ops[2], arg2_rtx);
4389 create_integer_operand (&ops[3], align);
4390 if (maybe_expand_insn (icode, 4, ops))
4391 return ops[0].value;
4392 return NULL_RTX;
4395 /* Expand expression EXP, which is a call to the memcmp built-in function.
4396 Return NULL_RTX if we failed and the caller should emit a normal call,
4397 otherwise try to get the result in TARGET, if convenient.
4398 RESULT_EQ is true if we can relax the returned value to be either zero
4399 or nonzero, without caring about the sign. */
4401 static rtx
4402 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4404 if (!validate_arglist (exp,
4405 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4406 return NULL_RTX;
4408 tree arg1 = CALL_EXPR_ARG (exp, 0);
4409 tree arg2 = CALL_EXPR_ARG (exp, 1);
4410 tree len = CALL_EXPR_ARG (exp, 2);
4412 /* Diagnose calls where the specified length exceeds the size of either
4413 object. */
4414 if (warn_stringop_overflow)
4416 tree size = compute_objsize (arg1, 0);
4417 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4418 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4420 size = compute_objsize (arg2, 0);
4421 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4422 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4426 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4427 location_t loc = EXPR_LOCATION (exp);
4429 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4430 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4432 /* If we don't have POINTER_TYPE, call the function. */
4433 if (arg1_align == 0 || arg2_align == 0)
4434 return NULL_RTX;
4436 rtx arg1_rtx = get_memory_rtx (arg1, len);
4437 rtx arg2_rtx = get_memory_rtx (arg2, len);
4438 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4440 /* Set MEM_SIZE as appropriate. */
4441 if (CONST_INT_P (len_rtx))
4443 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4444 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4447 by_pieces_constfn constfn = NULL;
4449 const char *src_str = c_getstr (arg2);
4450 if (result_eq && src_str == NULL)
4452 src_str = c_getstr (arg1);
4453 if (src_str != NULL)
4454 std::swap (arg1_rtx, arg2_rtx);
4457 /* If SRC is a string constant and the block comparison would be done
4458 by pieces, we can avoid loading the string from memory
4459 and only use the computed constants. */
4460 if (src_str
4461 && CONST_INT_P (len_rtx)
4462 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4463 constfn = builtin_memcpy_read_str;
4465 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4466 TREE_TYPE (len), target,
4467 result_eq, constfn,
4468 CONST_CAST (char *, src_str));
4470 if (result)
4472 /* Return the value in the proper mode for this function. */
4473 if (GET_MODE (result) == mode)
4474 return result;
4476 if (target != 0)
4478 convert_move (target, result, 0);
4479 return target;
4482 return convert_to_mode (mode, result, 0);
4485 return NULL_RTX;
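/* RESULT_EQ covers uses such as
     if (__builtin_memcmp (a, b, n) == 0)
       ...
   where only equality matters, so emit_block_cmp_hints may return any
   nonzero value on a mismatch instead of a properly signed
   less/equal/greater result.  */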
4488 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4489 if we failed; the caller should emit a normal call, otherwise try to get
4490 the result in TARGET, if convenient. */
4492 static rtx
4493 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4496 return NULL_RTX;
4498 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4499 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4500 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4501 return NULL_RTX;
4503 tree arg1 = CALL_EXPR_ARG (exp, 0);
4504 tree arg2 = CALL_EXPR_ARG (exp, 1);
4506 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4507 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4509 /* If we don't have POINTER_TYPE, call the function. */
4510 if (arg1_align == 0 || arg2_align == 0)
4511 return NULL_RTX;
4513 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4514 arg1 = builtin_save_expr (arg1);
4515 arg2 = builtin_save_expr (arg2);
4517 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4518 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4520 rtx result = NULL_RTX;
4521 /* Try to call cmpstrsi. */
4522 if (cmpstr_icode != CODE_FOR_nothing)
4523 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4524 MIN (arg1_align, arg2_align));
4526 /* Try to determine at least one length and call cmpstrnsi. */
4527 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4529 tree len;
4530 rtx arg3_rtx;
4532 tree len1 = c_strlen (arg1, 1);
4533 tree len2 = c_strlen (arg2, 1);
4535 if (len1)
4536 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4537 if (len2)
4538 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4540 /* If we don't have a constant length for the first, use the length
4541 of the second, if we know it. We don't require a constant for
4542 this case; some cost analysis could be done if both are available
4543 but neither is constant. For now, assume they're equally cheap,
4544 unless one has side effects. If both strings have constant lengths,
4545 use the smaller. */
4547 if (!len1)
4548 len = len2;
4549 else if (!len2)
4550 len = len1;
4551 else if (TREE_SIDE_EFFECTS (len1))
4552 len = len2;
4553 else if (TREE_SIDE_EFFECTS (len2))
4554 len = len1;
4555 else if (TREE_CODE (len1) != INTEGER_CST)
4556 len = len2;
4557 else if (TREE_CODE (len2) != INTEGER_CST)
4558 len = len1;
4559 else if (tree_int_cst_lt (len1, len2))
4560 len = len1;
4561 else
4562 len = len2;
4564 /* If both arguments have side effects, we cannot optimize. */
4565 if (len && !TREE_SIDE_EFFECTS (len))
4567 arg3_rtx = expand_normal (len);
4568 result = expand_cmpstrn_or_cmpmem
4569 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4570 arg3_rtx, MIN (arg1_align, arg2_align));
4574 /* Check to see if the argument was declared attribute nonstring
4575 and if so, issue a warning since at this point it's not known
4576 to be nul-terminated. */
4577 tree fndecl = get_callee_fndecl (exp);
4578 maybe_warn_nonstring_arg (fndecl, exp);
4580 if (result)
4582 /* Return the value in the proper mode for this function. */
4583 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4584 if (GET_MODE (result) == mode)
4585 return result;
4586 if (target == 0)
4587 return convert_to_mode (mode, result, 0);
4588 convert_move (target, result, 0);
4589 return target;
4592 /* Expand the library call ourselves using a stabilized argument
4593 list to avoid re-evaluating the function's arguments twice. */
4594 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4595 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4596 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4597 return expand_call (fn, target, target == const0_rtx);
4600 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4601 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4602 the result in TARGET, if convenient. */
4604 static rtx
4605 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4606 ATTRIBUTE_UNUSED machine_mode mode)
4608 if (!validate_arglist (exp,
4609 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4610 return NULL_RTX;
4612 /* If c_strlen can determine an expression for one of the string
4613 lengths, and it doesn't have side effects, then emit cmpstrnsi
4614 using length MIN(strlen(string)+1, arg3). */
4615 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4616 if (cmpstrn_icode == CODE_FOR_nothing)
4617 return NULL_RTX;
4619 tree len;
4621 tree arg1 = CALL_EXPR_ARG (exp, 0);
4622 tree arg2 = CALL_EXPR_ARG (exp, 1);
4623 tree arg3 = CALL_EXPR_ARG (exp, 2);
4625 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4626 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4628 tree len1 = c_strlen (arg1, 1);
4629 tree len2 = c_strlen (arg2, 1);
4631 location_t loc = EXPR_LOCATION (exp);
4633 if (len1)
4634 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4635 if (len2)
4636 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4638 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4640 /* If we don't have a constant length for the first, use the length
4641 of the second, if we know it. If neither string is constant length,
4642 use the given length argument. We don't require a constant for
4643 this case; some cost analysis could be done if both are available
4644 but neither is constant. For now, assume they're equally cheap,
4645 unless one has side effects. If both strings have constant lengths,
4646 use the smaller. */
4648 if (!len1 && !len2)
4649 len = len3;
4650 else if (!len1)
4651 len = len2;
4652 else if (!len2)
4653 len = len1;
4654 else if (TREE_SIDE_EFFECTS (len1))
4655 len = len2;
4656 else if (TREE_SIDE_EFFECTS (len2))
4657 len = len1;
4658 else if (TREE_CODE (len1) != INTEGER_CST)
4659 len = len2;
4660 else if (TREE_CODE (len2) != INTEGER_CST)
4661 len = len1;
4662 else if (tree_int_cst_lt (len1, len2))
4663 len = len1;
4664 else
4665 len = len2;
4667 /* If we are not using the given length, we must incorporate it here.
4668 The actual new length parameter will be MIN(len,arg3) in this case. */
4669 if (len != len3)
4670 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4671 rtx arg1_rtx = get_memory_rtx (arg1, len);
4672 rtx arg2_rtx = get_memory_rtx (arg2, len);
4673 rtx arg3_rtx = expand_normal (len);
4674 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4675 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4676 MIN (arg1_align, arg2_align));
4678 /* Check to see if the argument was declared attribute nonstring
4679 and if so, issue a warning since at this point it's not known
4680 to be nul-terminated. */
4681 tree fndecl = get_callee_fndecl (exp);
4682 maybe_warn_nonstring_arg (fndecl, exp);
4684 if (result)
4686 /* Return the value in the proper mode for this function. */
4687 mode = TYPE_MODE (TREE_TYPE (exp));
4688 if (GET_MODE (result) == mode)
4689 return result;
4690 if (target == 0)
4691 return convert_to_mode (mode, result, 0);
4692 convert_move (target, result, 0);
4693 return target;
4696 /* Expand the library call ourselves using a stabilized argument
4697 list to avoid re-evaluating the function's arguments twice. */
4698 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4699 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4700 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4701 return expand_call (fn, target, target == const0_rtx);
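/* Example of the length selection above (a sketch): for
     __builtin_strncmp ("ab", s, 10)
   LEN1 is strlen ("ab") + 1 == 3 and S has no known length, so LEN is 3;
   since that differs from the bound, the comparison is emitted with
   MIN (3, 10) == 3 bytes, as strncmp cannot look past the terminating
   NUL of the shorter constant string.  */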
4704 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4705 if that's convenient. */
4707 rtx
4708 expand_builtin_saveregs (void)
4710 rtx val;
4711 rtx_insn *seq;
4713 /* Don't do __builtin_saveregs more than once in a function.
4714 Save the result of the first call and reuse it. */
4715 if (saveregs_value != 0)
4716 return saveregs_value;
4718 /* When this function is called, it means that registers must be
4719 saved on entry to this function. So we migrate the call to the
4720 first insn of this function. */
4722 start_sequence ();
4724 /* Do whatever the machine needs done in this case. */
4725 val = targetm.calls.expand_builtin_saveregs ();
4727 seq = get_insns ();
4728 end_sequence ();
4730 saveregs_value = val;
4732 /* Put the insns after the NOTE that starts the function. If this
4733 is inside a start_sequence, make the outer-level insn chain current, so
4734 the code is placed at the start of the function. */
4735 push_topmost_sequence ();
4736 emit_insn_after (seq, entry_of_function ());
4737 pop_topmost_sequence ();
4739 return val;
4742 /* Expand a call to __builtin_next_arg. */
4744 static rtx
4745 expand_builtin_next_arg (void)
4747 /* Checking arguments is already done in fold_builtin_next_arg
4748 that must be called before this function. */
4749 return expand_binop (ptr_mode, add_optab,
4750 crtl->args.internal_arg_pointer,
4751 crtl->args.arg_offset_rtx,
4752 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4755 /* Make it easier for the backends by protecting the valist argument
4756 from multiple evaluations. */
4758 static tree
4759 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4761 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4763 /* The current way of determining the type of valist is completely
4764 bogus. We should have the information on the va builtin instead. */
4765 if (!vatype)
4766 vatype = targetm.fn_abi_va_list (cfun->decl);
4768 if (TREE_CODE (vatype) == ARRAY_TYPE)
4770 if (TREE_SIDE_EFFECTS (valist))
4771 valist = save_expr (valist);
4773 /* For this case, the backends will be expecting a pointer to
4774 vatype, but it's possible we've actually been given an array
4775 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4776 So fix it. */
4777 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4779 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4780 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4783 else
4785 tree pt = build_pointer_type (vatype);
4787 if (! needs_lvalue)
4789 if (! TREE_SIDE_EFFECTS (valist))
4790 return valist;
4792 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4793 TREE_SIDE_EFFECTS (valist) = 1;
4796 if (TREE_SIDE_EFFECTS (valist))
4797 valist = save_expr (valist);
4798 valist = fold_build2_loc (loc, MEM_REF,
4799 vatype, valist, build_int_cst (pt, 0));
4802 return valist;
4805 /* The "standard" definition of va_list is void*. */
4807 tree
4808 std_build_builtin_va_list (void)
4810 return ptr_type_node;
4813 /* The "standard" abi va_list is va_list_type_node. */
4815 tree
4816 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4818 return va_list_type_node;
4821 /* The "standard" type of va_list is va_list_type_node. */
4823 tree
4824 std_canonical_va_list_type (tree type)
4826 tree wtype, htype;
4828 wtype = va_list_type_node;
4829 htype = type;
4831 if (TREE_CODE (wtype) == ARRAY_TYPE)
4833 /* If va_list is an array type, the argument may have decayed
4834 to a pointer type, e.g. by being passed to another function.
4835 In that case, unwrap both types so that we can compare the
4836 underlying records. */
4837 if (TREE_CODE (htype) == ARRAY_TYPE
4838 || POINTER_TYPE_P (htype))
4840 wtype = TREE_TYPE (wtype);
4841 htype = TREE_TYPE (htype);
4844 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4845 return va_list_type_node;
4847 return NULL_TREE;
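/* For example, on a target whose va_list is an array type, roughly
     typedef struct __va_list_tag __builtin_va_list[1];
   an argument that has decayed to __va_list_tag * (as happens when a
   va_list is passed to another function) is still recognized here,
   because both types are unwrapped to the underlying record before
   their main variants are compared.  The typedef shown is only an
   illustration; the real one comes from the target.  */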
4850 /* The "standard" implementation of va_start: just assign `nextarg' to
4851 the variable. */
4853 void
4854 std_expand_builtin_va_start (tree valist, rtx nextarg)
4856 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4857 convert_move (va_r, nextarg, 0);
4859 /* We do not have any valid bounds for the pointer, so
4860 just store zero bounds for it. */
4861 if (chkp_function_instrumented_p (current_function_decl))
4862 chkp_expand_bounds_reset_for_mem (valist,
4863 make_tree (TREE_TYPE (valist),
4864 nextarg));
4867 /* Expand EXP, a call to __builtin_va_start. */
4869 static rtx
4870 expand_builtin_va_start (tree exp)
4872 rtx nextarg;
4873 tree valist;
4874 location_t loc = EXPR_LOCATION (exp);
4876 if (call_expr_nargs (exp) < 2)
4878 error_at (loc, "too few arguments to function %<va_start%>");
4879 return const0_rtx;
4882 if (fold_builtin_next_arg (exp, true))
4883 return const0_rtx;
4885 nextarg = expand_builtin_next_arg ();
4886 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4888 if (targetm.expand_builtin_va_start)
4889 targetm.expand_builtin_va_start (valist, nextarg);
4890 else
4891 std_expand_builtin_va_start (valist, nextarg);
4893 return const0_rtx;
4896 /* Expand EXP, a call to __builtin_va_end. */
4898 static rtx
4899 expand_builtin_va_end (tree exp)
4901 tree valist = CALL_EXPR_ARG (exp, 0);
4903 /* Evaluate for side effects, if needed. I hate macros that don't
4904 do that. */
4905 if (TREE_SIDE_EFFECTS (valist))
4906 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4908 return const0_rtx;
4911 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4912 builtin rather than just as an assignment in stdarg.h because of the
4913 nastiness of array-type va_list types. */
4915 static rtx
4916 expand_builtin_va_copy (tree exp)
4918 tree dst, src, t;
4919 location_t loc = EXPR_LOCATION (exp);
4921 dst = CALL_EXPR_ARG (exp, 0);
4922 src = CALL_EXPR_ARG (exp, 1);
4924 dst = stabilize_va_list_loc (loc, dst, 1);
4925 src = stabilize_va_list_loc (loc, src, 0);
4927 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4929 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4931 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4932 TREE_SIDE_EFFECTS (t) = 1;
4933 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4935 else
4937 rtx dstb, srcb, size;
4939 /* Evaluate to pointers. */
4940 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4941 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4942 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4943 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4945 dstb = convert_memory_address (Pmode, dstb);
4946 srcb = convert_memory_address (Pmode, srcb);
4948 /* "Dereference" to BLKmode memories. */
4949 dstb = gen_rtx_MEM (BLKmode, dstb);
4950 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4951 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4952 srcb = gen_rtx_MEM (BLKmode, srcb);
4953 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4954 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4956 /* Copy. */
4957 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4960 return const0_rtx;
4963 /* Expand a call to one of the builtin functions __builtin_frame_address or
4964 __builtin_return_address. */
4966 static rtx
4967 expand_builtin_frame_address (tree fndecl, tree exp)
4969 /* The argument must be a nonnegative integer constant.
4970 It counts the number of frames to scan up the stack.
4971 The value is either the frame pointer value or the return
4972 address saved in that frame. */
4973 if (call_expr_nargs (exp) == 0)
4974 /* Warning about missing arg was already issued. */
4975 return const0_rtx;
4976 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4978 error ("invalid argument to %qD", fndecl);
4979 return const0_rtx;
4981 else
4983 /* Number of frames to scan up the stack. */
4984 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4986 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4988 /* Some ports cannot access arbitrary stack frames. */
4989 if (tem == NULL)
4991 warning (0, "unsupported argument to %qD", fndecl);
4992 return const0_rtx;
4995 if (count)
4997 /* Warn since no effort is made to ensure that any frame
4998 beyond the current one exists or can be safely reached. */
4999 warning (OPT_Wframe_address, "calling %qD with "
5000 "a nonzero argument is unsafe", fndecl);
5003 /* For __builtin_frame_address, return what we've got. */
5004 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5005 return tem;
5007 if (!REG_P (tem)
5008 && ! CONSTANT_P (tem))
5009 tem = copy_addr_to_reg (tem);
5010 return tem;
5014 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5015 failed and the caller should emit a normal call. */
5017 static rtx
5018 expand_builtin_alloca (tree exp)
5020 rtx op0;
5021 rtx result;
5022 unsigned int align;
5023 tree fndecl = get_callee_fndecl (exp);
5024 HOST_WIDE_INT max_size;
5025 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5026 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5027 bool valid_arglist
5028 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5029 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5030 VOID_TYPE)
5031 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5032 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5033 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5035 if (!valid_arglist)
5036 return NULL_RTX;
5038 if ((alloca_for_var && !warn_vla_limit)
5039 || (!alloca_for_var && !warn_alloca_limit))
5041 /* -Walloca-larger-than and -Wvla-larger-than settings override
5042 the more general -Walloc-size-larger-than so unless either of
5043 the former options is specified check the alloca arguments for
5044 overflow. */
5045 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5046 int idx[] = { 0, -1 };
5047 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5050 /* Compute the argument. */
5051 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5053 /* Compute the alignment. */
5054 align = (fcode == BUILT_IN_ALLOCA
5055 ? BIGGEST_ALIGNMENT
5056 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5058 /* Compute the maximum size. */
5059 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5060 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5061 : -1);
5063 /* Allocate the desired space. If the allocation stems from the declaration
5064 of a variable-sized object, it cannot accumulate. */
5065 result
5066 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5067 result = convert_memory_address (ptr_mode, result);
5069 return result;
5072 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second argument
5073 of the call with virtual_stack_dynamic_rtx because in the asan pass we emit a
5074 dummy value as the second parameter, relying on this function to perform the
5075 change. See motivation for this in comment to handle_builtin_stack_restore
5076 function. */
5078 static rtx
5079 expand_asan_emit_allocas_unpoison (tree exp)
5081 tree arg0 = CALL_EXPR_ARG (exp, 0);
5082 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5083 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5084 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5085 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5086 top, ptr_mode, bot, ptr_mode);
5087 return ret;
5090 /* Expand a call to bswap builtin in EXP.
5091 Return NULL_RTX if a normal call should be emitted rather than expanding the
5092 function in-line. If convenient, the result should be placed in TARGET.
5093 SUBTARGET may be used as the target for computing one of EXP's operands. */
5095 static rtx
5096 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5097 rtx subtarget)
5099 tree arg;
5100 rtx op0;
5102 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5103 return NULL_RTX;
5105 arg = CALL_EXPR_ARG (exp, 0);
5106 op0 = expand_expr (arg,
5107 subtarget && GET_MODE (subtarget) == target_mode
5108 ? subtarget : NULL_RTX,
5109 target_mode, EXPAND_NORMAL);
5110 if (GET_MODE (op0) != target_mode)
5111 op0 = convert_to_mode (target_mode, op0, 1);
5113 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5115 gcc_assert (target);
5117 return convert_to_mode (target_mode, target, 1);
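/* For reference, the expansion performs the usual byte reversal, e.g.
   __builtin_bswap32 (0x12345678) yields 0x78563412.  */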
5120 /* Expand a call to a unary builtin in EXP.
5121 Return NULL_RTX if a normal call should be emitted rather than expanding the
5122 function in-line. If convenient, the result should be placed in TARGET.
5123 SUBTARGET may be used as the target for computing one of EXP's operands. */
5125 static rtx
5126 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5127 rtx subtarget, optab op_optab)
5129 rtx op0;
5131 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5132 return NULL_RTX;
5134 /* Compute the argument. */
5135 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5136 (subtarget
5137 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5138 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5139 VOIDmode, EXPAND_NORMAL);
5140 /* Compute op, into TARGET if possible.
5141 Set TARGET to wherever the result comes back. */
5142 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5143 op_optab, op0, target, op_optab != clrsb_optab);
5144 gcc_assert (target);
5146 return convert_to_mode (target_mode, target, 0);
5149 /* Expand a call to __builtin_expect. We just return our argument
5150 as the builtin_expect semantics should already have been applied by
5151 the tree branch prediction pass. */
5153 static rtx
5154 expand_builtin_expect (tree exp, rtx target)
5156 tree arg;
5158 if (call_expr_nargs (exp) < 2)
5159 return const0_rtx;
5160 arg = CALL_EXPR_ARG (exp, 0);
5162 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5163 /* When guessing was done, the hints should be already stripped away. */
5164 gcc_assert (!flag_guess_branch_prob
5165 || optimize == 0 || seen_error ());
5166 return target;
5169 /* Expand a call to __builtin_assume_aligned. We just return our first
5170 argument as the builtin_assume_aligned semantics should already have been
5171 applied by CCP. */
5173 static rtx
5174 expand_builtin_assume_aligned (tree exp, rtx target)
5176 if (call_expr_nargs (exp) < 2)
5177 return const0_rtx;
5178 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5179 EXPAND_NORMAL);
5180 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5181 && (call_expr_nargs (exp) < 3
5182 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5183 return target;
5186 void
5187 expand_builtin_trap (void)
5189 if (targetm.have_trap ())
5191 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5192 /* For trap insns, when not accumulating outgoing args, force a
5193 REG_ARGS_SIZE note to prevent crossjumping of calls with
5194 different args sizes. */
5195 if (!ACCUMULATE_OUTGOING_ARGS)
5196 add_args_size_note (insn, stack_pointer_delta);
5198 else
5200 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5201 tree call_expr = build_call_expr (fn, 0);
5202 expand_call (call_expr, NULL_RTX, false);
5205 emit_barrier ();
5208 /* Expand a call to __builtin_unreachable. We do nothing except emit
5209 a barrier saying that control flow will not pass here.
5211 It is the responsibility of the program being compiled to ensure
5212 that control flow never reaches __builtin_unreachable. */
5213 static void
5214 expand_builtin_unreachable (void)
5216 emit_barrier ();
5219 /* Expand EXP, a call to fabs, fabsf or fabsl.
5220 Return NULL_RTX if a normal call should be emitted rather than expanding
5221 the function inline. If convenient, the result should be placed
5222 in TARGET. SUBTARGET may be used as the target for computing
5223 the operand. */
5225 static rtx
5226 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5228 machine_mode mode;
5229 tree arg;
5230 rtx op0;
5232 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5233 return NULL_RTX;
5235 arg = CALL_EXPR_ARG (exp, 0);
5236 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5237 mode = TYPE_MODE (TREE_TYPE (arg));
5238 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5239 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5242 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5243 Return NULL if a normal call should be emitted rather than expanding the
5244 function inline. If convenient, the result should be placed in TARGET.
5245 SUBTARGET may be used as the target for computing the operand. */
5247 static rtx
5248 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5250 rtx op0, op1;
5251 tree arg;
5253 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5254 return NULL_RTX;
5256 arg = CALL_EXPR_ARG (exp, 0);
5257 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5259 arg = CALL_EXPR_ARG (exp, 1);
5260 op1 = expand_normal (arg);
5262 return expand_copysign (op0, op1, target);
5265 /* Expand a call to __builtin___clear_cache. */
5267 static rtx
5268 expand_builtin___clear_cache (tree exp)
5270 if (!targetm.code_for_clear_cache)
5272 #ifdef CLEAR_INSN_CACHE
5273 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5274 does something. Just do the default expansion to a call to
5275 __clear_cache(). */
5276 return NULL_RTX;
5277 #else
5278 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5279 does nothing. There is no need to call it. Do nothing. */
5280 return const0_rtx;
5281 #endif /* CLEAR_INSN_CACHE */
5284 /* We have a "clear_cache" insn, and it will handle everything. */
5285 tree begin, end;
5286 rtx begin_rtx, end_rtx;
5288 /* We must not expand to a library call. If we did, any
5289 fallback library function in libgcc that might contain a call to
5290 __builtin___clear_cache() would recurse infinitely. */
5291 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5293 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5294 return const0_rtx;
5297 if (targetm.have_clear_cache ())
5299 struct expand_operand ops[2];
5301 begin = CALL_EXPR_ARG (exp, 0);
5302 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5304 end = CALL_EXPR_ARG (exp, 1);
5305 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5307 create_address_operand (&ops[0], begin_rtx);
5308 create_address_operand (&ops[1], end_rtx);
5309 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5310 return const0_rtx;
5312 return const0_rtx;
5315 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5317 static rtx
5318 round_trampoline_addr (rtx tramp)
5320 rtx temp, addend, mask;
5322 /* If we don't need too much alignment, we'll have been guaranteed
5323 proper alignment by get_trampoline_type. */
5324 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5325 return tramp;
5327 /* Round address up to desired boundary. */
5328 temp = gen_reg_rtx (Pmode);
5329 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5330 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5332 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5333 temp, 0, OPTAB_LIB_WIDEN);
5334 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5335 temp, 0, OPTAB_LIB_WIDEN);
5337 return tramp;
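/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  The three pointer
   arguments are the trampoline storage, the nested function and the static
   chain; the actual initialization is delegated to the target hook.  */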
5340 static rtx
5341 expand_builtin_init_trampoline (tree exp, bool onstack)
5343 tree t_tramp, t_func, t_chain;
5344 rtx m_tramp, r_tramp, r_chain, tmp;
5346 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5347 POINTER_TYPE, VOID_TYPE))
5348 return NULL_RTX;
5350 t_tramp = CALL_EXPR_ARG (exp, 0);
5351 t_func = CALL_EXPR_ARG (exp, 1);
5352 t_chain = CALL_EXPR_ARG (exp, 2);
5354 r_tramp = expand_normal (t_tramp);
5355 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5356 MEM_NOTRAP_P (m_tramp) = 1;
5358 /* If ONSTACK, the TRAMP argument should be the address of a field
5359 within the local function's FRAME decl. Either way, let's see if
5360 we can fill in the MEM_ATTRs for this memory. */
5361 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5362 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5364 /* Creator of a heap trampoline is responsible for making sure the
5365 address is aligned to at least STACK_BOUNDARY. Normally malloc
5366 will ensure this anyhow. */
5367 tmp = round_trampoline_addr (r_tramp);
5368 if (tmp != r_tramp)
5370 m_tramp = change_address (m_tramp, BLKmode, tmp);
5371 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5372 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5375 /* The FUNC argument should be the address of the nested function.
5376 Extract the actual function decl to pass to the hook. */
5377 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5378 t_func = TREE_OPERAND (t_func, 0);
5379 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5381 r_chain = expand_normal (t_chain);
5383 /* Generate insns to initialize the trampoline. */
5384 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5386 if (onstack)
5388 trampolines_created = 1;
5390 if (targetm.calls.custom_function_descriptors != 0)
5391 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5392 "trampoline generated for nested function %qD", t_func);
5395 return const0_rtx;
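/* Expand a call to __builtin_adjust_trampoline.  Round the address up to
   TRAMPOLINE_ALIGNMENT and give the target a chance to adjust it further
   before it is used as a function pointer.  */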
5398 static rtx
5399 expand_builtin_adjust_trampoline (tree exp)
5401 rtx tramp;
5403 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5404 return NULL_RTX;
5406 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5407 tramp = round_trampoline_addr (tramp);
5408 if (targetm.calls.trampoline_adjust_address)
5409 tramp = targetm.calls.trampoline_adjust_address (tramp);
5411 return tramp;
5414 /* Expand a call to the builtin descriptor initialization routine.
5415 A descriptor is made up of a couple of pointers to the static
5416 chain and the code entry in this order. */
5418 static rtx
5419 expand_builtin_init_descriptor (tree exp)
5421 tree t_descr, t_func, t_chain;
5422 rtx m_descr, r_descr, r_func, r_chain;
5424 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5425 VOID_TYPE))
5426 return NULL_RTX;
5428 t_descr = CALL_EXPR_ARG (exp, 0);
5429 t_func = CALL_EXPR_ARG (exp, 1);
5430 t_chain = CALL_EXPR_ARG (exp, 2);
5432 r_descr = expand_normal (t_descr);
5433 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5434 MEM_NOTRAP_P (m_descr) = 1;
5436 r_func = expand_normal (t_func);
5437 r_chain = expand_normal (t_chain);
5439 /* Generate insns to initialize the descriptor. */
5440 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5441 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5442 POINTER_SIZE / BITS_PER_UNIT), r_func);
5444 return const0_rtx;
5447 /* Expand a call to the builtin descriptor adjustment routine. */
5449 static rtx
5450 expand_builtin_adjust_descriptor (tree exp)
5452 rtx tramp;
5454 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5455 return NULL_RTX;
5457 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5459 /* Unalign the descriptor to allow runtime identification. */
5460 tramp = plus_constant (ptr_mode, tramp,
5461 targetm.calls.custom_function_descriptors);
5463 return force_operand (tramp, NULL_RTX);
5466 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5467 function. The function first checks whether the back end provides
5468 an insn to implement signbit for the respective mode. If not, it
5469 checks whether the floating point format of the value is such that
5470 the sign bit can be extracted. If that is not the case, error out.
5471 EXP is the expression that is a call to the builtin function; if
5472 convenient, the result should be placed in TARGET. */
5473 static rtx
5474 expand_builtin_signbit (tree exp, rtx target)
5476 const struct real_format *fmt;
5477 scalar_float_mode fmode;
5478 scalar_int_mode rmode, imode;
5479 tree arg;
5480 int word, bitpos;
5481 enum insn_code icode;
5482 rtx temp;
5483 location_t loc = EXPR_LOCATION (exp);
5485 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5486 return NULL_RTX;
5488 arg = CALL_EXPR_ARG (exp, 0);
5489 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5490 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5491 fmt = REAL_MODE_FORMAT (fmode);
5493 arg = builtin_save_expr (arg);
5495 /* Expand the argument yielding a RTX expression. */
5496 temp = expand_normal (arg);
5498 /* Check if the back end provides an insn that handles signbit for the
5499 argument's mode. */
5500 icode = optab_handler (signbit_optab, fmode);
5501 if (icode != CODE_FOR_nothing)
5503 rtx_insn *last = get_last_insn ();
5504 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5505 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5506 return target;
5507 delete_insns_since (last);
5510 /* For floating point formats without a sign bit, implement signbit
5511 as "ARG < 0.0". */
5512 bitpos = fmt->signbit_ro;
5513 if (bitpos < 0)
5515 /* But we can't do this if the format supports signed zero. */
5516 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5518 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5519 build_real (TREE_TYPE (arg), dconst0));
5520 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5523 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5525 imode = int_mode_for_mode (fmode).require ();
5526 temp = gen_lowpart (imode, temp);
5528 else
5530 imode = word_mode;
5531 /* Handle targets with different FP word orders. */
5532 if (FLOAT_WORDS_BIG_ENDIAN)
5533 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5534 else
5535 word = bitpos / BITS_PER_WORD;
5536 temp = operand_subword_force (temp, word, fmode);
5537 bitpos = bitpos % BITS_PER_WORD;
5540 /* Force the intermediate word_mode (or narrower) result into a
5541 register. This avoids attempting to create paradoxical SUBREGs
5542 of floating point modes below. */
5543 temp = force_reg (imode, temp);
5545 /* If the bitpos is within the "result mode" lowpart, the operation
5546 can be implemented with a single bitwise AND. Otherwise, we need
5547 a right shift and an AND. */
5549 if (bitpos < GET_MODE_BITSIZE (rmode))
5551 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5553 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5554 temp = gen_lowpart (rmode, temp);
5555 temp = expand_binop (rmode, and_optab, temp,
5556 immed_wide_int_const (mask, rmode),
5557 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5559 else
5561 /* Perform a logical right shift to place the signbit in the least
5562 significant bit, then truncate the result to the desired mode
5563 and mask just this bit. */
5564 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5565 temp = gen_lowpart (rmode, temp);
5566 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5567 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5570 return temp;
5573 /* Expand fork or exec calls. TARGET is the desired target of the
5574 call. EXP is the call. FN is the
5575 decl of the actual function. IGNORE is nonzero if the
5576 value is to be ignored. */
5578 static rtx
5579 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5581 tree id, decl;
5582 tree call;
5584 /* If we are not profiling, just call the function. */
5585 if (!profile_arc_flag)
5586 return NULL_RTX;
5588 /* Otherwise call the wrapper. This should be equivalent for the rest of
5589 compiler, so the code does not diverge, and the wrapper may run the
5590 code necessary for keeping the profiling sane. */
5592 switch (DECL_FUNCTION_CODE (fn))
5594 case BUILT_IN_FORK:
5595 id = get_identifier ("__gcov_fork");
5596 break;
5598 case BUILT_IN_EXECL:
5599 id = get_identifier ("__gcov_execl");
5600 break;
5602 case BUILT_IN_EXECV:
5603 id = get_identifier ("__gcov_execv");
5604 break;
5606 case BUILT_IN_EXECLP:
5607 id = get_identifier ("__gcov_execlp");
5608 break;
5610 case BUILT_IN_EXECLE:
5611 id = get_identifier ("__gcov_execle");
5612 break;
5614 case BUILT_IN_EXECVP:
5615 id = get_identifier ("__gcov_execvp");
5616 break;
5618 case BUILT_IN_EXECVE:
5619 id = get_identifier ("__gcov_execve");
5620 break;
5622 default:
5623 gcc_unreachable ();
5626 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5627 FUNCTION_DECL, id, TREE_TYPE (fn));
5628 DECL_EXTERNAL (decl) = 1;
5629 TREE_PUBLIC (decl) = 1;
5630 DECL_ARTIFICIAL (decl) = 1;
5631 TREE_NOTHROW (decl) = 1;
5632 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5633 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5634 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5635 return expand_call (call, target, ignore);
5640 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5641 the pointer in these functions is void*, the tree optimizers may remove
5642 casts. The mode computed in expand_builtin isn't reliable either, due
5643 to __sync_bool_compare_and_swap.
5645 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5646 group of builtins. This gives us log2 of the mode size. */
5648 static inline machine_mode
5649 get_builtin_sync_mode (int fcode_diff)
5651 /* The size is not negotiable, so ask not to get BLKmode in return
5652 if the target indicates that a smaller size would be better. */
5653 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
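/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
   is 2, so the requested width is BITS_PER_UNIT << 2, i.e. 32 bits (SImode)
   on typical targets with 8-bit bytes.  */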
5656 /* Expand the memory expression LOC and return the appropriate memory operand
5657 for the builtin_sync operations. */
5659 static rtx
5660 get_builtin_sync_mem (tree loc, machine_mode mode)
5662 rtx addr, mem;
5664 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5665 addr = convert_memory_address (Pmode, addr);
5667 /* Note that we explicitly do not want any alias information for this
5668 memory, so that we kill all other live memories. Otherwise we don't
5669 satisfy the full barrier semantics of the intrinsic. */
5670 mem = validize_mem (gen_rtx_MEM (mode, addr));
5672 /* The alignment needs to be at least that of the mode. */
5673 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5674 get_pointer_alignment (loc)));
5675 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5676 MEM_VOLATILE_P (mem) = 1;
5678 return mem;
5681 /* Make sure an argument is in the right mode.
5682 EXP is the tree argument.
5683 MODE is the mode it should be in. */
5685 static rtx
5686 expand_expr_force_mode (tree exp, machine_mode mode)
5688 rtx val;
5689 machine_mode old_mode;
5691 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5692 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5693 of CONST_INTs, where we know the old_mode only from the call argument. */
5695 old_mode = GET_MODE (val);
5696 if (old_mode == VOIDmode)
5697 old_mode = TYPE_MODE (TREE_TYPE (exp));
5698 val = convert_modes (mode, old_mode, val, 1);
5699 return val;
5703 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5704 EXP is the CALL_EXPR. CODE is the rtx code
5705 that corresponds to the arithmetic or logical operation from the name;
5706 an exception here is that NOT actually means NAND. TARGET is an optional
5707 place for us to store the results; AFTER is true if this is the
5708 xxx_and_fetch form. */
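/* As a sketch of the mapping: __sync_fetch_and_nand (p, v) is expanded with
   CODE == NOT and AFTER == false, and atomically performs *p = ~(*p & v)
   while returning the old value, following the GCC 4.4 semantics warned
   about below.  */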
5710 static rtx
5711 expand_builtin_sync_operation (machine_mode mode, tree exp,
5712 enum rtx_code code, bool after,
5713 rtx target)
5715 rtx val, mem;
5716 location_t loc = EXPR_LOCATION (exp);
5718 if (code == NOT && warn_sync_nand)
5720 tree fndecl = get_callee_fndecl (exp);
5721 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5723 static bool warned_f_a_n, warned_n_a_f;
5725 switch (fcode)
5727 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5728 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5729 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5730 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5731 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5732 if (warned_f_a_n)
5733 break;
5735 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5736 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5737 warned_f_a_n = true;
5738 break;
5740 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5741 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5742 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5743 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5744 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5745 if (warned_n_a_f)
5746 break;
5748 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5749 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5750 warned_n_a_f = true;
5751 break;
5753 default:
5754 gcc_unreachable ();
5758 /* Expand the operands. */
5759 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5760 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5762 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5763 after);
5766 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5767 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5768 true if this is the boolean form. TARGET is a place for us to store the
5769 results; this is NOT optional if IS_BOOL is true. */
5771 static rtx
5772 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5773 bool is_bool, rtx target)
5775 rtx old_val, new_val, mem;
5776 rtx *pbool, *poval;
5778 /* Expand the operands. */
5779 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5780 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5781 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5783 pbool = poval = NULL;
5784 if (target != const0_rtx)
5786 if (is_bool)
5787 pbool = &target;
5788 else
5789 poval = &target;
5791 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5792 false, MEMMODEL_SYNC_SEQ_CST,
5793 MEMMODEL_SYNC_SEQ_CST))
5794 return NULL_RTX;
5796 return target;
5799 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5800 general form is actually an atomic exchange, and some targets only
5801 support a reduced form with the second argument being a constant 1.
5802 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5803 the results. */
5805 static rtx
5806 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5807 rtx target)
5809 rtx val, mem;
5811 /* Expand the operands. */
5812 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5813 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5815 return expand_sync_lock_test_and_set (target, mem, val);
5818 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5820 static void
5821 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5823 rtx mem;
5825 /* Expand the operands. */
5826 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5828 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5831 /* Given an integer representing an ``enum memmodel'', verify its
5832 correctness and return the memory model enum. */
5834 static enum memmodel
5835 get_memmodel (tree exp)
5837 rtx op;
5838 unsigned HOST_WIDE_INT val;
5839 source_location loc
5840 = expansion_point_location_if_in_system_header (input_location);
5842 /* If the parameter is not a constant, it's a run time value so we'll just
5843 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5844 if (TREE_CODE (exp) != INTEGER_CST)
5845 return MEMMODEL_SEQ_CST;
5847 op = expand_normal (exp);
5849 val = INTVAL (op);
5850 if (targetm.memmodel_check)
5851 val = targetm.memmodel_check (val);
5852 else if (val & ~MEMMODEL_MASK)
5854 warning_at (loc, OPT_Winvalid_memory_model,
5855 "unknown architecture specifier in memory model to builtin");
5856 return MEMMODEL_SEQ_CST;
5859 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5860 if (memmodel_base (val) >= MEMMODEL_LAST)
5862 warning_at (loc, OPT_Winvalid_memory_model,
5863 "invalid memory model argument to builtin");
5864 return MEMMODEL_SEQ_CST;
5867 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5868 be conservative and promote consume to acquire. */
5869 if (val == MEMMODEL_CONSUME)
5870 val = MEMMODEL_ACQUIRE;
5872 return (enum memmodel) val;
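/* For example, a source-level __ATOMIC_CONSUME argument is returned here as
   MEMMODEL_ACQUIRE because of the PR 59448 workaround above, while any
   non-constant model argument is conservatively treated as seq-cst.  */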
5875 /* Expand the __atomic_exchange intrinsic:
5876 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5877 EXP is the CALL_EXPR.
5878 TARGET is an optional place for us to store the results. */
5880 static rtx
5881 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5883 rtx val, mem;
5884 enum memmodel model;
5886 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5888 if (!flag_inline_atomics)
5889 return NULL_RTX;
5891 /* Expand the operands. */
5892 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5893 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5895 return expand_atomic_exchange (target, mem, val, model);
5898 /* Expand the __atomic_compare_exchange intrinsic:
5899 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5900 TYPE desired, BOOL weak,
5901 enum memmodel success,
5902 enum memmodel failure)
5903 EXP is the CALL_EXPR.
5904 TARGET is an optional place for us to store the results. */
5906 static rtx
5907 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5908 rtx target)
5910 rtx expect, desired, mem, oldval;
5911 rtx_code_label *label;
5912 enum memmodel success, failure;
5913 tree weak;
5914 bool is_weak;
5915 source_location loc
5916 = expansion_point_location_if_in_system_header (input_location);
5918 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5919 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5921 if (failure > success)
5923 warning_at (loc, OPT_Winvalid_memory_model,
5924 "failure memory model cannot be stronger than success "
5925 "memory model for %<__atomic_compare_exchange%>");
5926 success = MEMMODEL_SEQ_CST;
5929 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5931 warning_at (loc, OPT_Winvalid_memory_model,
5932 "invalid failure memory model for "
5933 "%<__atomic_compare_exchange%>");
5934 failure = MEMMODEL_SEQ_CST;
5935 success = MEMMODEL_SEQ_CST;
5939 if (!flag_inline_atomics)
5940 return NULL_RTX;
5942 /* Expand the operands. */
5943 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5945 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5946 expect = convert_memory_address (Pmode, expect);
5947 expect = gen_rtx_MEM (mode, expect);
5948 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5950 weak = CALL_EXPR_ARG (exp, 3);
5951 is_weak = false;
5952 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5953 is_weak = true;
5955 if (target == const0_rtx)
5956 target = NULL;
5958 /* Lest the rtl backend create a race condition with an improper store
5959 to memory, always create a new pseudo for OLDVAL. */
5960 oldval = NULL;
5962 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5963 is_weak, success, failure))
5964 return NULL_RTX;
5966 /* Conditionally store back to EXPECT, lest we create a race condition
5967 with an improper store to memory. */
5968 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5969 the normal case where EXPECT is totally private, i.e. a register. At
5970 which point the store can be unconditional. */
5971 label = gen_label_rtx ();
5972 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5973 GET_MODE (target), 1, label);
5974 emit_move_insn (expect, oldval);
5975 emit_label (label);
5977 return target;
5980 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5981 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5982 call. The weak parameter must be dropped to match the expected parameter
5983 list and the expected argument changed from value to pointer to memory
5984 slot. */
5986 static void
5987 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5989 unsigned int z;
5990 vec<tree, va_gc> *vec;
5992 vec_alloc (vec, 5);
5993 vec->quick_push (gimple_call_arg (call, 0));
5994 tree expected = gimple_call_arg (call, 1);
5995 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5996 TREE_TYPE (expected));
5997 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5998 if (expd != x)
5999 emit_move_insn (x, expd);
6000 tree v = make_tree (TREE_TYPE (expected), x);
6001 vec->quick_push (build1 (ADDR_EXPR,
6002 build_pointer_type (TREE_TYPE (expected)), v));
6003 vec->quick_push (gimple_call_arg (call, 2));
6004 /* Skip the boolean weak parameter. */
6005 for (z = 4; z < 6; z++)
6006 vec->quick_push (gimple_call_arg (call, z));
6007 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6008 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6009 gcc_assert (bytes_log2 < 5);
6010 built_in_function fncode
6011 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6012 + bytes_log2);
6013 tree fndecl = builtin_decl_explicit (fncode);
6014 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6015 fndecl);
6016 tree exp = build_call_vec (boolean_type_node, fn, vec);
6017 tree lhs = gimple_call_lhs (call);
6018 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6019 if (lhs)
6021 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6022 if (GET_MODE (boolret) != mode)
6023 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6024 x = force_reg (mode, x);
6025 write_complex_part (target, boolret, true);
6026 write_complex_part (target, x, false);
6030 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6032 void
6033 expand_ifn_atomic_compare_exchange (gcall *call)
6035 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6036 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6037 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6038 rtx expect, desired, mem, oldval, boolret;
6039 enum memmodel success, failure;
6040 tree lhs;
6041 bool is_weak;
6042 source_location loc
6043 = expansion_point_location_if_in_system_header (gimple_location (call));
6045 success = get_memmodel (gimple_call_arg (call, 4));
6046 failure = get_memmodel (gimple_call_arg (call, 5));
6048 if (failure > success)
6050 warning_at (loc, OPT_Winvalid_memory_model,
6051 "failure memory model cannot be stronger than success "
6052 "memory model for %<__atomic_compare_exchange%>");
6053 success = MEMMODEL_SEQ_CST;
6056 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6058 warning_at (loc, OPT_Winvalid_memory_model,
6059 "invalid failure memory model for "
6060 "%<__atomic_compare_exchange%>");
6061 failure = MEMMODEL_SEQ_CST;
6062 success = MEMMODEL_SEQ_CST;
6065 if (!flag_inline_atomics)
6067 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6068 return;
6071 /* Expand the operands. */
6072 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6074 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6075 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6077 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6079 boolret = NULL;
6080 oldval = NULL;
6082 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6083 is_weak, success, failure))
6085 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6086 return;
6089 lhs = gimple_call_lhs (call);
6090 if (lhs)
6092 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6093 if (GET_MODE (boolret) != mode)
6094 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6095 write_complex_part (target, boolret, true);
6096 write_complex_part (target, oldval, false);
6100 /* Expand the __atomic_load intrinsic:
6101 TYPE __atomic_load (TYPE *object, enum memmodel)
6102 EXP is the CALL_EXPR.
6103 TARGET is an optional place for us to store the results. */
6105 static rtx
6106 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6108 rtx mem;
6109 enum memmodel model;
6111 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6112 if (is_mm_release (model) || is_mm_acq_rel (model))
6114 source_location loc
6115 = expansion_point_location_if_in_system_header (input_location);
6116 warning_at (loc, OPT_Winvalid_memory_model,
6117 "invalid memory model for %<__atomic_load%>");
6118 model = MEMMODEL_SEQ_CST;
6121 if (!flag_inline_atomics)
6122 return NULL_RTX;
6124 /* Expand the operand. */
6125 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6127 return expand_atomic_load (target, mem, model);
6131 /* Expand the __atomic_store intrinsic:
6132 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6133 EXP is the CALL_EXPR.
6134 TARGET is an optional place for us to store the results. */
6136 static rtx
6137 expand_builtin_atomic_store (machine_mode mode, tree exp)
6139 rtx mem, val;
6140 enum memmodel model;
6142 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6143 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6144 || is_mm_release (model)))
6146 source_location loc
6147 = expansion_point_location_if_in_system_header (input_location);
6148 warning_at (loc, OPT_Winvalid_memory_model,
6149 "invalid memory model for %<__atomic_store%>");
6150 model = MEMMODEL_SEQ_CST;
6153 if (!flag_inline_atomics)
6154 return NULL_RTX;
6156 /* Expand the operands. */
6157 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6158 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6160 return expand_atomic_store (mem, val, model, false);
6163 /* Expand the __atomic_fetch_XXX intrinsic:
6164 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6165 EXP is the CALL_EXPR.
6166 TARGET is an optional place for us to store the results.
6167 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6168 FETCH_AFTER is true if returning the result of the operation.
6169 FETCH_AFTER is false if returning the value before the operation.
6170 IGNORE is true if the result is not used.
6171 EXT_CALL is the correct builtin for an external call if this cannot be
6172 resolved to an instruction sequence. */
6174 static rtx
6175 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6176 enum rtx_code code, bool fetch_after,
6177 bool ignore, enum built_in_function ext_call)
6179 rtx val, mem, ret;
6180 enum memmodel model;
6181 tree fndecl;
6182 tree addr;
6184 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6186 /* Expand the operands. */
6187 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6188 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6190 /* Only try generating instructions if inlining is turned on. */
6191 if (flag_inline_atomics)
6193 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6194 if (ret)
6195 return ret;
6198 /* Return if a different routine isn't needed for the library call. */
6199 if (ext_call == BUILT_IN_NONE)
6200 return NULL_RTX;
6202 /* Change the call to the specified function. */
6203 fndecl = get_callee_fndecl (exp);
6204 addr = CALL_EXPR_FN (exp);
6205 STRIP_NOPS (addr);
6207 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6208 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6210 /* If we will emit code after the call, the call cannot be a tail call.
6211 If it is emitted as a tail call, a barrier is emitted after it, and
6212 then all trailing code is removed. */
6213 if (!ignore)
6214 CALL_EXPR_TAILCALL (exp) = 0;
6216 /* Expand the call here so we can emit trailing code. */
6217 ret = expand_call (exp, target, ignore);
6219 /* Replace the original function just in case it matters. */
6220 TREE_OPERAND (addr, 0) = fndecl;
6222 /* Then issue the arithmetic correction to return the right result. */
6223 if (!ignore)
6225 if (code == NOT)
6227 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6228 OPTAB_LIB_WIDEN);
6229 ret = expand_simple_unop (mode, NOT, ret, target, true);
6231 else
6232 ret = expand_simple_binop (mode, code, ret, val, target, true,
6233 OPTAB_LIB_WIDEN);
6235 return ret;
6238 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
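/* These internal calls are created by tree optimizations that recognize
   idioms such as __atomic_fetch_or (p, 1 << n, model) & (1 << n); when the
   target lacks a direct optab, the fallback below reconstructs the result
   from a plain atomic fetch-op.  */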
6240 void
6241 expand_ifn_atomic_bit_test_and (gcall *call)
6243 tree ptr = gimple_call_arg (call, 0);
6244 tree bit = gimple_call_arg (call, 1);
6245 tree flag = gimple_call_arg (call, 2);
6246 tree lhs = gimple_call_lhs (call);
6247 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6248 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6249 enum rtx_code code;
6250 optab optab;
6251 struct expand_operand ops[5];
6253 gcc_assert (flag_inline_atomics);
6255 if (gimple_call_num_args (call) == 4)
6256 model = get_memmodel (gimple_call_arg (call, 3));
6258 rtx mem = get_builtin_sync_mem (ptr, mode);
6259 rtx val = expand_expr_force_mode (bit, mode);
6261 switch (gimple_call_internal_fn (call))
6263 case IFN_ATOMIC_BIT_TEST_AND_SET:
6264 code = IOR;
6265 optab = atomic_bit_test_and_set_optab;
6266 break;
6267 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6268 code = XOR;
6269 optab = atomic_bit_test_and_complement_optab;
6270 break;
6271 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6272 code = AND;
6273 optab = atomic_bit_test_and_reset_optab;
6274 break;
6275 default:
6276 gcc_unreachable ();
6279 if (lhs == NULL_TREE)
6281 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6282 val, NULL_RTX, true, OPTAB_DIRECT);
6283 if (code == AND)
6284 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6285 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6286 return;
6289 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6290 enum insn_code icode = direct_optab_handler (optab, mode);
6291 gcc_assert (icode != CODE_FOR_nothing);
6292 create_output_operand (&ops[0], target, mode);
6293 create_fixed_operand (&ops[1], mem);
6294 create_convert_operand_to (&ops[2], val, mode, true);
6295 create_integer_operand (&ops[3], model);
6296 create_integer_operand (&ops[4], integer_onep (flag));
6297 if (maybe_expand_insn (icode, 5, ops))
6298 return;
6300 rtx bitval = val;
6301 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6302 val, NULL_RTX, true, OPTAB_DIRECT);
6303 rtx maskval = val;
6304 if (code == AND)
6305 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6306 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6307 code, model, false);
6308 if (integer_onep (flag))
6310 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6311 NULL_RTX, true, OPTAB_DIRECT);
6312 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6313 true, OPTAB_DIRECT);
6315 else
6316 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6317 OPTAB_DIRECT);
6318 if (result != target)
6319 emit_move_insn (target, result);
6322 /* Expand an atomic clear operation.
6323 void _atomic_clear (BOOL *obj, enum memmodel)
6324 EXP is the call expression. */
6326 static rtx
6327 expand_builtin_atomic_clear (tree exp)
6329 machine_mode mode;
6330 rtx mem, ret;
6331 enum memmodel model;
6333 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6334 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6335 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6337 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6339 source_location loc
6340 = expansion_point_location_if_in_system_header (input_location);
6341 warning_at (loc, OPT_Winvalid_memory_model,
6342 "invalid memory model for %<__atomic_store%>");
6343 model = MEMMODEL_SEQ_CST;
6346 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6347 Failing that, issue a plain store. The only way this can
6348 fail is if the bool type is larger than a word size. Unlikely, but
6349 handle it anyway for completeness. Assume a single threaded model since
6350 there is no atomic support in this case, and no barriers are required. */
6351 ret = expand_atomic_store (mem, const0_rtx, model, true);
6352 if (!ret)
6353 emit_move_insn (mem, const0_rtx);
6354 return const0_rtx;
6357 /* Expand an atomic test_and_set operation.
6358 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6359 EXP is the call expression. */
6361 static rtx
6362 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6364 rtx mem;
6365 enum memmodel model;
6366 machine_mode mode;
6368 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6369 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6370 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6372 return expand_atomic_test_and_set (target, mem, model);
6376 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6377 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
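/* A rough example: on a typical target where int is 32 bits and a 32-bit
   compare-and-swap pattern and atomic load exist, a front-end call such as
   __atomic_always_lock_free (sizeof (int), 0) folds to true; a pointer
   argument encoding insufficient alignment makes it fold to false.  */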
6379 static tree
6380 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6382 int size;
6383 machine_mode mode;
6384 unsigned int mode_align, type_align;
6386 if (TREE_CODE (arg0) != INTEGER_CST)
6387 return NULL_TREE;
6389 /* We need a corresponding integer mode for the access to be lock-free. */
6390 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6391 if (!int_mode_for_size (size, 0).exists (&mode))
6392 return boolean_false_node;
6394 mode_align = GET_MODE_ALIGNMENT (mode);
6396 if (TREE_CODE (arg1) == INTEGER_CST)
6398 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6400 /* Either this argument is null, or it's a fake pointer encoding
6401 the alignment of the object. */
6402 val = least_bit_hwi (val);
6403 val *= BITS_PER_UNIT;
6405 if (val == 0 || mode_align < val)
6406 type_align = mode_align;
6407 else
6408 type_align = val;
6410 else
6412 tree ttype = TREE_TYPE (arg1);
6414 /* This function is usually invoked and folded immediately by the front
6415 end before anything else has a chance to look at it. The pointer
6416 parameter at this point is usually cast to a void *, so check for that
6417 and look past the cast. */
6418 if (CONVERT_EXPR_P (arg1)
6419 && POINTER_TYPE_P (ttype)
6420 && VOID_TYPE_P (TREE_TYPE (ttype))
6421 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6422 arg1 = TREE_OPERAND (arg1, 0);
6424 ttype = TREE_TYPE (arg1);
6425 gcc_assert (POINTER_TYPE_P (ttype));
6427 /* Get the underlying type of the object. */
6428 ttype = TREE_TYPE (ttype);
6429 type_align = TYPE_ALIGN (ttype);
6432 /* If the object has smaller alignment, the lock free routines cannot
6433 be used. */
6434 if (type_align < mode_align)
6435 return boolean_false_node;
6437 /* Check if a compare_and_swap pattern exists for the mode which represents
6438 the required size. The pattern is not allowed to fail, so the existence
6439 of the pattern indicates support is present. Also require that an
6440 atomic load exists for the required size. */
6441 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6442 return boolean_true_node;
6443 else
6444 return boolean_false_node;
6447 /* Return true if the parameters to call EXP represent an object which will
6448 always generate lock free instructions. The first argument represents the
6449 size of the object, and the second parameter is a pointer to the object
6450 itself. If NULL is passed for the object, then the result is based on
6451 typical alignment for an object of the specified size. Otherwise return
6452 false. */
6454 static rtx
6455 expand_builtin_atomic_always_lock_free (tree exp)
6457 tree size;
6458 tree arg0 = CALL_EXPR_ARG (exp, 0);
6459 tree arg1 = CALL_EXPR_ARG (exp, 1);
6461 if (TREE_CODE (arg0) != INTEGER_CST)
6463 error ("non-constant argument 1 to __atomic_always_lock_free");
6464 return const0_rtx;
6467 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6468 if (size == boolean_true_node)
6469 return const1_rtx;
6470 return const0_rtx;
6473 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6474 is lock free on this architecture. */
6476 static tree
6477 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6479 if (!flag_inline_atomics)
6480 return NULL_TREE;
6482 /* If it isn't always lock free, don't generate a result. */
6483 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6484 return boolean_true_node;
6486 return NULL_TREE;
6489 /* Return true if the parameters to call EXP represent an object which will
6490 always generate lock free instructions. The first argument represents the
6491 size of the object, and the second parameter is a pointer to the object
6492 itself. If NULL is passed for the object, then the result is based on
6493 typical alignment for an object of the specified size. Otherwise return
6494 NULL. */
6496 static rtx
6497 expand_builtin_atomic_is_lock_free (tree exp)
6499 tree size;
6500 tree arg0 = CALL_EXPR_ARG (exp, 0);
6501 tree arg1 = CALL_EXPR_ARG (exp, 1);
6503 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6505 error ("non-integer argument 1 to __atomic_is_lock_free");
6506 return NULL_RTX;
6509 if (!flag_inline_atomics)
6510 return NULL_RTX;
6512 /* If the value is known at compile time, return the RTX for it. */
6513 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6514 if (size == boolean_true_node)
6515 return const1_rtx;
6517 return NULL_RTX;
6520 /* Expand the __atomic_thread_fence intrinsic:
6521 void __atomic_thread_fence (enum memmodel)
6522 EXP is the CALL_EXPR. */
6524 static void
6525 expand_builtin_atomic_thread_fence (tree exp)
6527 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6528 expand_mem_thread_fence (model);
6531 /* Expand the __atomic_signal_fence intrinsic:
6532 void __atomic_signal_fence (enum memmodel)
6533 EXP is the CALL_EXPR. */
6535 static void
6536 expand_builtin_atomic_signal_fence (tree exp)
6538 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6539 expand_mem_signal_fence (model);
6542 /* Expand the __sync_synchronize intrinsic. */
6544 static void
6545 expand_builtin_sync_synchronize (void)
6547 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
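/* Expand a call to __builtin_thread_pointer.  Use the target's
   get_thread_pointer optab when one exists, otherwise report an error.  */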
6550 static rtx
6551 expand_builtin_thread_pointer (tree exp, rtx target)
6553 enum insn_code icode;
6554 if (!validate_arglist (exp, VOID_TYPE))
6555 return const0_rtx;
6556 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6557 if (icode != CODE_FOR_nothing)
6559 struct expand_operand op;
6560 /* If the target is not suitable then create a new target. */
6561 if (target == NULL_RTX
6562 || !REG_P (target)
6563 || GET_MODE (target) != Pmode)
6564 target = gen_reg_rtx (Pmode);
6565 create_output_operand (&op, target, Pmode);
6566 expand_insn (icode, 1, &op);
6567 return target;
6569 error ("__builtin_thread_pointer is not supported on this target");
6570 return const0_rtx;
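/* Expand a call to __builtin_set_thread_pointer.  As above, this relies on
   the target providing a set_thread_pointer optab.  */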
6573 static void
6574 expand_builtin_set_thread_pointer (tree exp)
6576 enum insn_code icode;
6577 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6578 return;
6579 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6580 if (icode != CODE_FOR_nothing)
6582 struct expand_operand op;
6583 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6584 Pmode, EXPAND_NORMAL);
6585 create_input_operand (&op, val, Pmode);
6586 expand_insn (icode, 1, &op);
6587 return;
6589 error ("__builtin_set_thread_pointer is not supported on this target");
6593 /* Emit code to restore the current value of stack. */
6595 static void
6596 expand_stack_restore (tree var)
6598 rtx_insn *prev;
6599 rtx sa = expand_normal (var);
6601 sa = convert_memory_address (Pmode, sa);
6603 prev = get_last_insn ();
6604 emit_stack_restore (SAVE_BLOCK, sa);
6606 record_new_stack_level ();
6608 fixup_args_size_notes (prev, get_last_insn (), 0);
6611 /* Emit code to save the current value of stack. */
6613 static rtx
6614 expand_stack_save (void)
6616 rtx ret = NULL_RTX;
6618 emit_stack_save (SAVE_BLOCK, &ret);
6619 return ret;
6623 /* Expand an expression EXP that calls a built-in function,
6624 with result going to TARGET if that's convenient
6625 (and in mode MODE if that's convenient).
6626 SUBTARGET may be used as the target for computing one of EXP's operands.
6627 IGNORE is nonzero if the value is to be ignored. */
6629 rtx
6630 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6631 int ignore)
6633 tree fndecl = get_callee_fndecl (exp);
6634 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6635 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6636 int flags;
6638 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6639 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6641 /* When ASan is enabled, we don't want to expand some memory/string
6642 builtins and rely on libsanitizer's hooks. This allows us to avoid
6643 redundant checks and be sure, that possible overflow will be detected
6644 by ASan. */
6646 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6647 return expand_call (exp, target, ignore);
6649 /* When not optimizing, generate calls to library functions for a certain
6650 set of builtins. */
6651 if (!optimize
6652 && !called_as_built_in (fndecl)
6653 && fcode != BUILT_IN_FORK
6654 && fcode != BUILT_IN_EXECL
6655 && fcode != BUILT_IN_EXECV
6656 && fcode != BUILT_IN_EXECLP
6657 && fcode != BUILT_IN_EXECLE
6658 && fcode != BUILT_IN_EXECVP
6659 && fcode != BUILT_IN_EXECVE
6660 && !ALLOCA_FUNCTION_CODE_P (fcode)
6661 && fcode != BUILT_IN_FREE
6662 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6663 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6664 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6665 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6666 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6667 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6668 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6669 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6670 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6671 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6672 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6673 && fcode != BUILT_IN_CHKP_BNDRET)
6674 return expand_call (exp, target, ignore);
6676 /* The built-in function expanders test for target == const0_rtx
6677 to determine whether the function's result will be ignored. */
6678 if (ignore)
6679 target = const0_rtx;
6681 /* If the result of a pure or const built-in function is ignored, and
6682 none of its arguments are volatile, we can avoid expanding the
6683 built-in call and just evaluate the arguments for side-effects. */
6684 if (target == const0_rtx
6685 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6686 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6688 bool volatilep = false;
6689 tree arg;
6690 call_expr_arg_iterator iter;
6692 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6693 if (TREE_THIS_VOLATILE (arg))
6695 volatilep = true;
6696 break;
6699 if (! volatilep)
6701 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6702 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6703 return const0_rtx;
6707 /* expand_builtin_with_bounds is supposed to be used for
6708 instrumented builtin calls. */
6709 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6711 switch (fcode)
6713 CASE_FLT_FN (BUILT_IN_FABS):
6714 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6715 case BUILT_IN_FABSD32:
6716 case BUILT_IN_FABSD64:
6717 case BUILT_IN_FABSD128:
6718 target = expand_builtin_fabs (exp, target, subtarget);
6719 if (target)
6720 return target;
6721 break;
6723 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6724 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6725 target = expand_builtin_copysign (exp, target, subtarget);
6726 if (target)
6727 return target;
6728 break;
6730 /* Just do a normal library call if we were unable to fold
6731 the values. */
6732 CASE_FLT_FN (BUILT_IN_CABS):
6733 break;
6735 CASE_FLT_FN (BUILT_IN_FMA):
6736 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6737 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6738 if (target)
6739 return target;
6740 break;
6742 CASE_FLT_FN (BUILT_IN_ILOGB):
6743 if (! flag_unsafe_math_optimizations)
6744 break;
6745 gcc_fallthrough ();
6746 CASE_FLT_FN (BUILT_IN_ISINF):
6747 CASE_FLT_FN (BUILT_IN_FINITE):
6748 case BUILT_IN_ISFINITE:
6749 case BUILT_IN_ISNORMAL:
6750 target = expand_builtin_interclass_mathfn (exp, target);
6751 if (target)
6752 return target;
6753 break;
6755 CASE_FLT_FN (BUILT_IN_ICEIL):
6756 CASE_FLT_FN (BUILT_IN_LCEIL):
6757 CASE_FLT_FN (BUILT_IN_LLCEIL):
6758 CASE_FLT_FN (BUILT_IN_LFLOOR):
6759 CASE_FLT_FN (BUILT_IN_IFLOOR):
6760 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6761 target = expand_builtin_int_roundingfn (exp, target);
6762 if (target)
6763 return target;
6764 break;
6766 CASE_FLT_FN (BUILT_IN_IRINT):
6767 CASE_FLT_FN (BUILT_IN_LRINT):
6768 CASE_FLT_FN (BUILT_IN_LLRINT):
6769 CASE_FLT_FN (BUILT_IN_IROUND):
6770 CASE_FLT_FN (BUILT_IN_LROUND):
6771 CASE_FLT_FN (BUILT_IN_LLROUND):
6772 target = expand_builtin_int_roundingfn_2 (exp, target);
6773 if (target)
6774 return target;
6775 break;
6777 CASE_FLT_FN (BUILT_IN_POWI):
6778 target = expand_builtin_powi (exp, target);
6779 if (target)
6780 return target;
6781 break;
6783 CASE_FLT_FN (BUILT_IN_CEXPI):
6784 target = expand_builtin_cexpi (exp, target);
6785 gcc_assert (target);
6786 return target;
6788 CASE_FLT_FN (BUILT_IN_SIN):
6789 CASE_FLT_FN (BUILT_IN_COS):
6790 if (! flag_unsafe_math_optimizations)
6791 break;
6792 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6793 if (target)
6794 return target;
6795 break;
6797 CASE_FLT_FN (BUILT_IN_SINCOS):
6798 if (! flag_unsafe_math_optimizations)
6799 break;
6800 target = expand_builtin_sincos (exp);
6801 if (target)
6802 return target;
6803 break;
6805 case BUILT_IN_APPLY_ARGS:
6806 return expand_builtin_apply_args ();
6808 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6809 FUNCTION with a copy of the parameters described by
6810 ARGUMENTS, and ARGSIZE. It returns a block of memory
6811 allocated on the stack into which is stored all the registers
6812 that might possibly be used for returning the result of a
6813 function. ARGUMENTS is the value returned by
6814 __builtin_apply_args. ARGSIZE is the number of bytes of
6815 arguments that must be copied. ??? How should this value be
6816 computed? We'll also need a safe worst case value for varargs
6817 functions. */
6818 case BUILT_IN_APPLY:
6819 if (!validate_arglist (exp, POINTER_TYPE,
6820 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6821 && !validate_arglist (exp, REFERENCE_TYPE,
6822 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6823 return const0_rtx;
6824 else
6826 rtx ops[3];
6828 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6829 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6830 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6832 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6835 /* __builtin_return (RESULT) causes the function to return the
6836 value described by RESULT. RESULT is address of the block of
6837 memory returned by __builtin_apply. */
6838 case BUILT_IN_RETURN:
6839 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6840 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6841 return const0_rtx;
6843 case BUILT_IN_SAVEREGS:
6844 return expand_builtin_saveregs ();
6846 case BUILT_IN_VA_ARG_PACK:
6847 /* All valid uses of __builtin_va_arg_pack () are removed during
6848 inlining. */
6849 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6850 return const0_rtx;
6852 case BUILT_IN_VA_ARG_PACK_LEN:
6853 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6854 inlining. */
6855 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6856 return const0_rtx;
6858 /* Return the address of the first anonymous stack arg. */
6859 case BUILT_IN_NEXT_ARG:
6860 if (fold_builtin_next_arg (exp, false))
6861 return const0_rtx;
6862 return expand_builtin_next_arg ();
6864 case BUILT_IN_CLEAR_CACHE:
6865 target = expand_builtin___clear_cache (exp);
6866 if (target)
6867 return target;
6868 break;
6870 case BUILT_IN_CLASSIFY_TYPE:
6871 return expand_builtin_classify_type (exp);
6873 case BUILT_IN_CONSTANT_P:
6874 return const0_rtx;
6876 case BUILT_IN_FRAME_ADDRESS:
6877 case BUILT_IN_RETURN_ADDRESS:
6878 return expand_builtin_frame_address (fndecl, exp);
6880 /* Returns the address of the area where the structure is returned.
6881 0 otherwise. */
6882 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6883 if (call_expr_nargs (exp) != 0
6884 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6885 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6886 return const0_rtx;
6887 else
6888 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6890 CASE_BUILT_IN_ALLOCA:
6891 target = expand_builtin_alloca (exp);
6892 if (target)
6893 return target;
6894 break;
6896 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6897 return expand_asan_emit_allocas_unpoison (exp);
6899 case BUILT_IN_STACK_SAVE:
6900 return expand_stack_save ();
6902 case BUILT_IN_STACK_RESTORE:
6903 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6904 return const0_rtx;
6906 case BUILT_IN_BSWAP16:
6907 case BUILT_IN_BSWAP32:
6908 case BUILT_IN_BSWAP64:
6909 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6910 if (target)
6911 return target;
6912 break;
6914 CASE_INT_FN (BUILT_IN_FFS):
6915 target = expand_builtin_unop (target_mode, exp, target,
6916 subtarget, ffs_optab);
6917 if (target)
6918 return target;
6919 break;
6921 CASE_INT_FN (BUILT_IN_CLZ):
6922 target = expand_builtin_unop (target_mode, exp, target,
6923 subtarget, clz_optab);
6924 if (target)
6925 return target;
6926 break;
6928 CASE_INT_FN (BUILT_IN_CTZ):
6929 target = expand_builtin_unop (target_mode, exp, target,
6930 subtarget, ctz_optab);
6931 if (target)
6932 return target;
6933 break;
6935 CASE_INT_FN (BUILT_IN_CLRSB):
6936 target = expand_builtin_unop (target_mode, exp, target,
6937 subtarget, clrsb_optab);
6938 if (target)
6939 return target;
6940 break;
6942 CASE_INT_FN (BUILT_IN_POPCOUNT):
6943 target = expand_builtin_unop (target_mode, exp, target,
6944 subtarget, popcount_optab);
6945 if (target)
6946 return target;
6947 break;
6949 CASE_INT_FN (BUILT_IN_PARITY):
6950 target = expand_builtin_unop (target_mode, exp, target,
6951 subtarget, parity_optab);
6952 if (target)
6953 return target;
6954 break;
6956 case BUILT_IN_STRLEN:
6957 target = expand_builtin_strlen (exp, target, target_mode);
6958 if (target)
6959 return target;
6960 break;
6962 case BUILT_IN_STRCAT:
6963 target = expand_builtin_strcat (exp, target);
6964 if (target)
6965 return target;
6966 break;
6968 case BUILT_IN_STRCPY:
6969 target = expand_builtin_strcpy (exp, target);
6970 if (target)
6971 return target;
6972 break;
6974 case BUILT_IN_STRNCAT:
6975 target = expand_builtin_strncat (exp, target);
6976 if (target)
6977 return target;
6978 break;
6980 case BUILT_IN_STRNCPY:
6981 target = expand_builtin_strncpy (exp, target);
6982 if (target)
6983 return target;
6984 break;
6986 case BUILT_IN_STPCPY:
6987 target = expand_builtin_stpcpy (exp, target, mode);
6988 if (target)
6989 return target;
6990 break;
6992 case BUILT_IN_STPNCPY:
6993 target = expand_builtin_stpncpy (exp, target);
6994 if (target)
6995 return target;
6996 break;
6998 case BUILT_IN_MEMCHR:
6999 target = expand_builtin_memchr (exp, target);
7000 if (target)
7001 return target;
7002 break;
7004 case BUILT_IN_MEMCPY:
7005 target = expand_builtin_memcpy (exp, target);
7006 if (target)
7007 return target;
7008 break;
7010 case BUILT_IN_MEMMOVE:
7011 target = expand_builtin_memmove (exp, target);
7012 if (target)
7013 return target;
7014 break;
7016 case BUILT_IN_MEMPCPY:
7017 target = expand_builtin_mempcpy (exp, target);
7018 if (target)
7019 return target;
7020 break;
7022 case BUILT_IN_MEMSET:
7023 target = expand_builtin_memset (exp, target, mode);
7024 if (target)
7025 return target;
7026 break;
7028 case BUILT_IN_BZERO:
7029 target = expand_builtin_bzero (exp);
7030 if (target)
7031 return target;
7032 break;
7034 case BUILT_IN_STRCMP:
7035 target = expand_builtin_strcmp (exp, target);
7036 if (target)
7037 return target;
7038 break;
7040 case BUILT_IN_STRNCMP:
7041 target = expand_builtin_strncmp (exp, target, mode);
7042 if (target)
7043 return target;
7044 break;
7046 case BUILT_IN_BCMP:
7047 case BUILT_IN_MEMCMP:
7048 case BUILT_IN_MEMCMP_EQ:
7049 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7050 if (target)
7051 return target;
7052 if (fcode == BUILT_IN_MEMCMP_EQ)
7054 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7055 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7057 break;
7059 case BUILT_IN_SETJMP:
7060 /* This should have been lowered to the builtins below. */
7061 gcc_unreachable ();
7063 case BUILT_IN_SETJMP_SETUP:
7064 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7065 and the receiver label. */
7066 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7068 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7069 VOIDmode, EXPAND_NORMAL);
7070 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7071 rtx_insn *label_r = label_rtx (label);
7073 /* This is copied from the handling of non-local gotos. */
7074 expand_builtin_setjmp_setup (buf_addr, label_r);
7075 nonlocal_goto_handler_labels
7076 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7077 nonlocal_goto_handler_labels);
7078 /* ??? Do not let expand_label treat us as such since we would
7079 not want to be both on the list of non-local labels and on
7080 the list of forced labels. */
7081 FORCED_LABEL (label) = 0;
7082 return const0_rtx;
7084 break;
7086 case BUILT_IN_SETJMP_RECEIVER:
7087 /* __builtin_setjmp_receiver is passed the receiver label. */
7088 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7090 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7091 rtx_insn *label_r = label_rtx (label);
7093 expand_builtin_setjmp_receiver (label_r);
7094 return const0_rtx;
7096 break;
7098 /* __builtin_longjmp is passed a pointer to an array of five words.
7099 It's similar to the C library longjmp function but works with
7100 __builtin_setjmp above. */
7101 case BUILT_IN_LONGJMP:
7102 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7104 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7105 VOIDmode, EXPAND_NORMAL);
7106 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7108 if (value != const1_rtx)
7110 error ("%<__builtin_longjmp%> second argument must be 1");
7111 return const0_rtx;
7114 expand_builtin_longjmp (buf_addr, value);
7115 return const0_rtx;
7117 break;
7119 case BUILT_IN_NONLOCAL_GOTO:
7120 target = expand_builtin_nonlocal_goto (exp);
7121 if (target)
7122 return target;
7123 break;
7125 /* This updates the setjmp buffer that is its argument with the value
7126 of the current stack pointer. */
7127 case BUILT_IN_UPDATE_SETJMP_BUF:
7128 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7130 rtx buf_addr
7131 = expand_normal (CALL_EXPR_ARG (exp, 0));
7133 expand_builtin_update_setjmp_buf (buf_addr);
7134 return const0_rtx;
7136 break;
7138 case BUILT_IN_TRAP:
7139 expand_builtin_trap ();
7140 return const0_rtx;
7142 case BUILT_IN_UNREACHABLE:
7143 expand_builtin_unreachable ();
7144 return const0_rtx;
7146 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7147 case BUILT_IN_SIGNBITD32:
7148 case BUILT_IN_SIGNBITD64:
7149 case BUILT_IN_SIGNBITD128:
7150 target = expand_builtin_signbit (exp, target);
7151 if (target)
7152 return target;
7153 break;
7155 /* Various hooks for the DWARF 2 __throw routine. */
7156 case BUILT_IN_UNWIND_INIT:
7157 expand_builtin_unwind_init ();
7158 return const0_rtx;
7159 case BUILT_IN_DWARF_CFA:
7160 return virtual_cfa_rtx;
7161 #ifdef DWARF2_UNWIND_INFO
7162 case BUILT_IN_DWARF_SP_COLUMN:
7163 return expand_builtin_dwarf_sp_column ();
7164 case BUILT_IN_INIT_DWARF_REG_SIZES:
7165 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7166 return const0_rtx;
7167 #endif
7168 case BUILT_IN_FROB_RETURN_ADDR:
7169 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7170 case BUILT_IN_EXTRACT_RETURN_ADDR:
7171 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7172 case BUILT_IN_EH_RETURN:
7173 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7174 CALL_EXPR_ARG (exp, 1));
7175 return const0_rtx;
7176 case BUILT_IN_EH_RETURN_DATA_REGNO:
7177 return expand_builtin_eh_return_data_regno (exp);
7178 case BUILT_IN_EXTEND_POINTER:
7179 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7180 case BUILT_IN_EH_POINTER:
7181 return expand_builtin_eh_pointer (exp);
7182 case BUILT_IN_EH_FILTER:
7183 return expand_builtin_eh_filter (exp);
7184 case BUILT_IN_EH_COPY_VALUES:
7185 return expand_builtin_eh_copy_values (exp);
7187 case BUILT_IN_VA_START:
7188 return expand_builtin_va_start (exp);
7189 case BUILT_IN_VA_END:
7190 return expand_builtin_va_end (exp);
7191 case BUILT_IN_VA_COPY:
7192 return expand_builtin_va_copy (exp);
7193 case BUILT_IN_EXPECT:
7194 return expand_builtin_expect (exp, target);
7195 case BUILT_IN_ASSUME_ALIGNED:
7196 return expand_builtin_assume_aligned (exp, target);
7197 case BUILT_IN_PREFETCH:
7198 expand_builtin_prefetch (exp);
7199 return const0_rtx;
7201 case BUILT_IN_INIT_TRAMPOLINE:
7202 return expand_builtin_init_trampoline (exp, true);
7203 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7204 return expand_builtin_init_trampoline (exp, false);
7205 case BUILT_IN_ADJUST_TRAMPOLINE:
7206 return expand_builtin_adjust_trampoline (exp);
7208 case BUILT_IN_INIT_DESCRIPTOR:
7209 return expand_builtin_init_descriptor (exp);
7210 case BUILT_IN_ADJUST_DESCRIPTOR:
7211 return expand_builtin_adjust_descriptor (exp);
7213 case BUILT_IN_FORK:
7214 case BUILT_IN_EXECL:
7215 case BUILT_IN_EXECV:
7216 case BUILT_IN_EXECLP:
7217 case BUILT_IN_EXECLE:
7218 case BUILT_IN_EXECVP:
7219 case BUILT_IN_EXECVE:
7220 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7221 if (target)
7222 return target;
7223 break;
7225 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7226 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7227 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7228 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7229 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
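/* The _1/_2/_4/_8/_16 variants are consecutive enumerators, so the offset
   from the _1 entry encodes the access size and selects the integer mode.  */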
7230 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7231 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7232 if (target)
7233 return target;
7234 break;
7236 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7237 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7238 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7239 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7240 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7241 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7242 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7243 if (target)
7244 return target;
7245 break;
7247 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7248 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7249 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7250 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7251 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7252 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7253 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7254 if (target)
7255 return target;
7256 break;
7258 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7259 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7260 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7261 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7262 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7263 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7264 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7265 if (target)
7266 return target;
7267 break;
7269 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7270 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7271 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7272 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7273 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7274 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7275 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7276 if (target)
7277 return target;
7278 break;
7280 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7281 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7282 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7283 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7284 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7285 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7286 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7287 if (target)
7288 return target;
7289 break;
7291 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7292 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7293 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7294 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7295 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7296 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7297 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7298 if (target)
7299 return target;
7300 break;
7302 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7303 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7304 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7305 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7306 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7307 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7308 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7309 if (target)
7310 return target;
7311 break;
7313 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7314 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7315 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7316 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7317 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7318 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7319 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7320 if (target)
7321 return target;
7322 break;
7324 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7325 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7326 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7327 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7328 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7329 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7330 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7331 if (target)
7332 return target;
7333 break;
7335 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7336 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7337 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7338 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7339 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7340 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7341 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7342 if (target)
7343 return target;
7344 break;
7346 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7347 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7348 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7349 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7350 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7351 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7352 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7353 if (target)
7354 return target;
7355 break;
7357 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7358 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7359 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7360 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7361 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
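/* The boolean result needs a register in the mode of boolean_type_node;
   only afterwards is MODE recomputed as the mode of the memory operand.  */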
7362 if (mode == VOIDmode)
7363 mode = TYPE_MODE (boolean_type_node);
7364 if (!target || !register_operand (target, mode))
7365 target = gen_reg_rtx (mode);
7367 mode = get_builtin_sync_mode
7368 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7369 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7370 if (target)
7371 return target;
7372 break;
7374 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7375 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7376 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7377 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7378 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7379 mode = get_builtin_sync_mode
7380 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7381 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7382 if (target)
7383 return target;
7384 break;
7386 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7387 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7388 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7389 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7390 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7391 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7392 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7393 if (target)
7394 return target;
7395 break;
7397 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7398 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7399 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7400 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7401 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7402 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7403 expand_builtin_sync_lock_release (mode, exp);
7404 return const0_rtx;
7406 case BUILT_IN_SYNC_SYNCHRONIZE:
7407 expand_builtin_sync_synchronize ();
7408 return const0_rtx;
7410 case BUILT_IN_ATOMIC_EXCHANGE_1:
7411 case BUILT_IN_ATOMIC_EXCHANGE_2:
7412 case BUILT_IN_ATOMIC_EXCHANGE_4:
7413 case BUILT_IN_ATOMIC_EXCHANGE_8:
7414 case BUILT_IN_ATOMIC_EXCHANGE_16:
7415 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7416 target = expand_builtin_atomic_exchange (mode, exp, target);
7417 if (target)
7418 return target;
7419 break;
7421 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7422 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7423 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7424 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7425 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7427 unsigned int nargs, z;
7428 vec<tree, va_gc> *vec;
7430 mode =
7431 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7432 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7433 if (target)
7434 return target;
7436 /* If this is turned into an external library call, the weak parameter
7437 must be dropped to match the expected parameter list. */
7438 nargs = call_expr_nargs (exp);
7439 vec_alloc (vec, nargs - 1);
7440 for (z = 0; z < 3; z++)
7441 vec->quick_push (CALL_EXPR_ARG (exp, z));
7442 /* Skip the boolean weak parameter (argument 3), keeping the two memory-order arguments. */
7443 for (z = 4; z < 6; z++)
7444 vec->quick_push (CALL_EXPR_ARG (exp, z));
7445 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7446 break;
7449 case BUILT_IN_ATOMIC_LOAD_1:
7450 case BUILT_IN_ATOMIC_LOAD_2:
7451 case BUILT_IN_ATOMIC_LOAD_4:
7452 case BUILT_IN_ATOMIC_LOAD_8:
7453 case BUILT_IN_ATOMIC_LOAD_16:
7454 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7455 target = expand_builtin_atomic_load (mode, exp, target);
7456 if (target)
7457 return target;
7458 break;
7460 case BUILT_IN_ATOMIC_STORE_1:
7461 case BUILT_IN_ATOMIC_STORE_2:
7462 case BUILT_IN_ATOMIC_STORE_4:
7463 case BUILT_IN_ATOMIC_STORE_8:
7464 case BUILT_IN_ATOMIC_STORE_16:
7465 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7466 target = expand_builtin_atomic_store (mode, exp);
7467 if (target)
7468 return const0_rtx;
7469 break;
7471 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7472 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7473 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7474 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7475 case BUILT_IN_ATOMIC_ADD_FETCH_16:
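/* LIB names the matching __atomic_fetch_<op> builtin: if no op-and-fetch
   pattern is available, expand_builtin_atomic_fetch_op can fall back to that
   fetch-and-op library call and apply the operation to its result.  */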
7477 enum built_in_function lib;
7478 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7479 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7480 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7481 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7482 ignore, lib);
7483 if (target)
7484 return target;
7485 break;
7487 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7488 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7489 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7490 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7491 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7493 enum built_in_function lib;
7494 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7495 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7496 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7497 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7498 ignore, lib);
7499 if (target)
7500 return target;
7501 break;
7503 case BUILT_IN_ATOMIC_AND_FETCH_1:
7504 case BUILT_IN_ATOMIC_AND_FETCH_2:
7505 case BUILT_IN_ATOMIC_AND_FETCH_4:
7506 case BUILT_IN_ATOMIC_AND_FETCH_8:
7507 case BUILT_IN_ATOMIC_AND_FETCH_16:
7509 enum built_in_function lib;
7510 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7511 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7512 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7513 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7514 ignore, lib);
7515 if (target)
7516 return target;
7517 break;
7519 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7520 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7521 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7522 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7523 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7525 enum built_in_function lib;
7526 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7527 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7528 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7529 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7530 ignore, lib);
7531 if (target)
7532 return target;
7533 break;
7535 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7536 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7537 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7538 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7539 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7541 enum built_in_function lib;
7542 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7543 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7544 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7545 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7546 ignore, lib);
7547 if (target)
7548 return target;
7549 break;
7551 case BUILT_IN_ATOMIC_OR_FETCH_1:
7552 case BUILT_IN_ATOMIC_OR_FETCH_2:
7553 case BUILT_IN_ATOMIC_OR_FETCH_4:
7554 case BUILT_IN_ATOMIC_OR_FETCH_8:
7555 case BUILT_IN_ATOMIC_OR_FETCH_16:
7557 enum built_in_function lib;
7558 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7559 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7560 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7561 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7562 ignore, lib);
7563 if (target)
7564 return target;
7565 break;
7567 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7568 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7569 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7570 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7571 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7572 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7573 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7574 ignore, BUILT_IN_NONE);
7575 if (target)
7576 return target;
7577 break;
7579 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7580 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7581 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7582 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7583 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7584 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7585 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7586 ignore, BUILT_IN_NONE);
7587 if (target)
7588 return target;
7589 break;
7591 case BUILT_IN_ATOMIC_FETCH_AND_1:
7592 case BUILT_IN_ATOMIC_FETCH_AND_2:
7593 case BUILT_IN_ATOMIC_FETCH_AND_4:
7594 case BUILT_IN_ATOMIC_FETCH_AND_8:
7595 case BUILT_IN_ATOMIC_FETCH_AND_16:
7596 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7597 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7598 ignore, BUILT_IN_NONE);
7599 if (target)
7600 return target;
7601 break;
7603 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7604 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7605 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7606 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7607 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7608 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7609 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7610 ignore, BUILT_IN_NONE);
7611 if (target)
7612 return target;
7613 break;
7615 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7616 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7617 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7618 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7619 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7621 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7622 ignore, BUILT_IN_NONE);
7623 if (target)
7624 return target;
7625 break;
7627 case BUILT_IN_ATOMIC_FETCH_OR_1:
7628 case BUILT_IN_ATOMIC_FETCH_OR_2:
7629 case BUILT_IN_ATOMIC_FETCH_OR_4:
7630 case BUILT_IN_ATOMIC_FETCH_OR_8:
7631 case BUILT_IN_ATOMIC_FETCH_OR_16:
7632 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7633 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7634 ignore, BUILT_IN_NONE);
7635 if (target)
7636 return target;
7637 break;
7639 case BUILT_IN_ATOMIC_TEST_AND_SET:
7640 return expand_builtin_atomic_test_and_set (exp, target);
7642 case BUILT_IN_ATOMIC_CLEAR:
7643 return expand_builtin_atomic_clear (exp);
7645 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7646 return expand_builtin_atomic_always_lock_free (exp);
7648 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7649 target = expand_builtin_atomic_is_lock_free (exp);
7650 if (target)
7651 return target;
7652 break;
7654 case BUILT_IN_ATOMIC_THREAD_FENCE:
7655 expand_builtin_atomic_thread_fence (exp);
7656 return const0_rtx;
7658 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7659 expand_builtin_atomic_signal_fence (exp);
7660 return const0_rtx;
7662 case BUILT_IN_OBJECT_SIZE:
7663 return expand_builtin_object_size (exp);
7665 case BUILT_IN_MEMCPY_CHK:
7666 case BUILT_IN_MEMPCPY_CHK:
7667 case BUILT_IN_MEMMOVE_CHK:
7668 case BUILT_IN_MEMSET_CHK:
7669 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7670 if (target)
7671 return target;
7672 break;
7674 case BUILT_IN_STRCPY_CHK:
7675 case BUILT_IN_STPCPY_CHK:
7676 case BUILT_IN_STRNCPY_CHK:
7677 case BUILT_IN_STPNCPY_CHK:
7678 case BUILT_IN_STRCAT_CHK:
7679 case BUILT_IN_STRNCAT_CHK:
7680 case BUILT_IN_SNPRINTF_CHK:
7681 case BUILT_IN_VSNPRINTF_CHK:
7682 maybe_emit_chk_warning (exp, fcode);
7683 break;
7685 case BUILT_IN_SPRINTF_CHK:
7686 case BUILT_IN_VSPRINTF_CHK:
7687 maybe_emit_sprintf_chk_warning (exp, fcode);
7688 break;
7690 case BUILT_IN_FREE:
7691 if (warn_free_nonheap_object)
7692 maybe_emit_free_warning (exp);
7693 break;
7695 case BUILT_IN_THREAD_POINTER:
7696 return expand_builtin_thread_pointer (exp, target);
7698 case BUILT_IN_SET_THREAD_POINTER:
7699 expand_builtin_set_thread_pointer (exp);
7700 return const0_rtx;
7702 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7703 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7704 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7705 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7706 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7707 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7708 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7709 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7710 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7711 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7712 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7713 /* We allow user CHKP builtins if the Pointer Bounds
7714 Checker is off. */
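/* With instrumentation disabled, the bound-setting builtins below simply
   reduce to their pointer argument and the bound queries to trivial
   constants.  */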
7715 if (!chkp_function_instrumented_p (current_function_decl))
7717 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7718 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7719 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7720 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7721 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7722 return expand_normal (CALL_EXPR_ARG (exp, 0));
7723 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7724 return expand_normal (size_zero_node);
7725 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7726 return expand_normal (size_int (-1));
7727 else
7728 return const0_rtx;
7730 /* FALLTHROUGH */
7732 case BUILT_IN_CHKP_BNDMK:
7733 case BUILT_IN_CHKP_BNDSTX:
7734 case BUILT_IN_CHKP_BNDCL:
7735 case BUILT_IN_CHKP_BNDCU:
7736 case BUILT_IN_CHKP_BNDLDX:
7737 case BUILT_IN_CHKP_BNDRET:
7738 case BUILT_IN_CHKP_INTERSECT:
7739 case BUILT_IN_CHKP_NARROW:
7740 case BUILT_IN_CHKP_EXTRACT_LOWER:
7741 case BUILT_IN_CHKP_EXTRACT_UPPER:
7742 /* A software implementation of the Pointer Bounds Checker is not yet
7743 implemented; target support is required. */
7744 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7745 break;
7747 case BUILT_IN_ACC_ON_DEVICE:
7748 /* Do a library call if we failed to expand the builtin when
7749 folding. */
7750 break;
7752 default: /* Just do a library call for any unknown builtin. */
7753 break;
7756 /* The switch statement above can drop through to cause the function
7757 to be called normally. */
7758 return expand_call (exp, target, ignore);
7761 /* Similar to expand_builtin but is used for instrumented calls. */
7764 expand_builtin_with_bounds (tree exp, rtx target,
7765 rtx subtarget ATTRIBUTE_UNUSED,
7766 machine_mode mode, int ignore)
7768 tree fndecl = get_callee_fndecl (exp);
7769 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7771 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7773 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7774 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7776 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7777 && fcode < END_CHKP_BUILTINS);
7779 switch (fcode)
7781 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7782 target = expand_builtin_memcpy_with_bounds (exp, target);
7783 if (target)
7784 return target;
7785 break;
7787 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7788 target = expand_builtin_mempcpy_with_bounds (exp, target);
7789 if (target)
7790 return target;
7791 break;
7793 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7794 target = expand_builtin_memset_with_bounds (exp, target, mode);
7795 if (target)
7796 return target;
7797 break;
7799 case BUILT_IN_MEMCPY_CHKP:
7800 case BUILT_IN_MEMMOVE_CHKP:
7801 case BUILT_IN_MEMPCPY_CHKP:
7802 if (call_expr_nargs (exp) > 3)
7804 /* memcpy_chkp (void *dst, size_t dstbnd,
7805 const void *src, size_t srcbnd, size_t n)
7806 and others take a pointer bound argument just after each
7807 pointer argument. */
7808 tree dest = CALL_EXPR_ARG (exp, 0);
7809 tree src = CALL_EXPR_ARG (exp, 2);
7810 tree len = CALL_EXPR_ARG (exp, 4);
7812 check_memop_access (exp, dest, src, len);
7813 break;
7816 default:
7817 break;
7820 /* The switch statement above can drop through to cause the function
7821 to be called normally. */
7822 return expand_call (exp, target, ignore);
7825 /* Determine whether a tree node represents a call to a built-in
7826 function. If the tree T is a call to a built-in function with
7827 the right number of arguments of the appropriate types, return
7828 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7829 Otherwise the return value is END_BUILTINS. */
7831 enum built_in_function
7832 builtin_mathfn_code (const_tree t)
7834 const_tree fndecl, arg, parmlist;
7835 const_tree argtype, parmtype;
7836 const_call_expr_arg_iterator iter;
7838 if (TREE_CODE (t) != CALL_EXPR
7839 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7840 return END_BUILTINS;
7842 fndecl = get_callee_fndecl (t);
7843 if (fndecl == NULL_TREE
7844 || TREE_CODE (fndecl) != FUNCTION_DECL
7845 || ! DECL_BUILT_IN (fndecl)
7846 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7847 return END_BUILTINS;
7849 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7850 init_const_call_expr_arg_iterator (t, &iter);
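/* Walk the declared parameter list and the actual arguments in parallel;
   any mismatch in type class means this is not a recognized math builtin
   call.  */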
7851 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7853 /* If a function doesn't take a variable number of arguments,
7854 the last element in the list will have type `void'. */
7855 parmtype = TREE_VALUE (parmlist);
7856 if (VOID_TYPE_P (parmtype))
7858 if (more_const_call_expr_args_p (&iter))
7859 return END_BUILTINS;
7860 return DECL_FUNCTION_CODE (fndecl);
7863 if (! more_const_call_expr_args_p (&iter))
7864 return END_BUILTINS;
7866 arg = next_const_call_expr_arg (&iter);
7867 argtype = TREE_TYPE (arg);
7869 if (SCALAR_FLOAT_TYPE_P (parmtype))
7871 if (! SCALAR_FLOAT_TYPE_P (argtype))
7872 return END_BUILTINS;
7874 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7876 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7877 return END_BUILTINS;
7879 else if (POINTER_TYPE_P (parmtype))
7881 if (! POINTER_TYPE_P (argtype))
7882 return END_BUILTINS;
7884 else if (INTEGRAL_TYPE_P (parmtype))
7886 if (! INTEGRAL_TYPE_P (argtype))
7887 return END_BUILTINS;
7889 else
7890 return END_BUILTINS;
7893 /* Variable-length argument list. */
7894 return DECL_FUNCTION_CODE (fndecl);
7897 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7898 evaluate to a constant. */
7900 static tree
7901 fold_builtin_constant_p (tree arg)
7903 /* We return 1 for a numeric type that's known to be a constant
7904 value at compile-time or for an aggregate type that's a
7905 literal constant. */
7906 STRIP_NOPS (arg);
7908 /* If we know this is a constant, emit the constant of one. */
7909 if (CONSTANT_CLASS_P (arg)
7910 || (TREE_CODE (arg) == CONSTRUCTOR
7911 && TREE_CONSTANT (arg)))
7912 return integer_one_node;
7913 if (TREE_CODE (arg) == ADDR_EXPR)
7915 tree op = TREE_OPERAND (arg, 0);
7916 if (TREE_CODE (op) == STRING_CST
7917 || (TREE_CODE (op) == ARRAY_REF
7918 && integer_zerop (TREE_OPERAND (op, 1))
7919 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7920 return integer_one_node;
7923 /* If this expression has side effects, show we don't know it to be a
7924 constant. Likewise if it's a pointer or aggregate type, since in
7925 those cases we only want literals, as those are only optimized
7926 when generating RTL, not later.
7927 And finally, if we are compiling an initializer, not code, we
7928 need to return a definite result now; there's not going to be any
7929 more optimization done. */
7930 if (TREE_SIDE_EFFECTS (arg)
7931 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7932 || POINTER_TYPE_P (TREE_TYPE (arg))
7933 || cfun == 0
7934 || folding_initializer
7935 || force_folding_builtin_constant_p)
7936 return integer_zero_node;
7938 return NULL_TREE;
7941 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7942 return it as a truthvalue. */
7944 static tree
7945 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7946 tree predictor)
7948 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7950 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7951 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7952 ret_type = TREE_TYPE (TREE_TYPE (fn));
7953 pred_type = TREE_VALUE (arg_types);
7954 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7956 pred = fold_convert_loc (loc, pred_type, pred);
7957 expected = fold_convert_loc (loc, expected_type, expected);
7958 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7959 predictor);
7961 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7962 build_int_cst (ret_type, 0));
7965 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7966 NULL_TREE if no simplification is possible. */
7968 tree
7969 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7971 tree inner, fndecl, inner_arg0;
7972 enum tree_code code;
7974 /* Distribute the expected value over short-circuiting operators.
7975 See through the cast from truthvalue_type_node to long. */
7976 inner_arg0 = arg0;
7977 while (CONVERT_EXPR_P (inner_arg0)
7978 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7979 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7980 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7982 /* If this is a builtin_expect within a builtin_expect, keep the
7983 inner one. See through a comparison against a constant. It
7984 might have been added to create a truthvalue. */
7985 inner = inner_arg0;
7987 if (COMPARISON_CLASS_P (inner)
7988 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7989 inner = TREE_OPERAND (inner, 0);
7991 if (TREE_CODE (inner) == CALL_EXPR
7992 && (fndecl = get_callee_fndecl (inner))
7993 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7994 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7995 return arg0;
7997 inner = inner_arg0;
7998 code = TREE_CODE (inner);
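/* Illustratively, __builtin_expect (a && b, 1) is rebuilt here as roughly
   (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
   converted back to the type of the original argument.  */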
7999 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8001 tree op0 = TREE_OPERAND (inner, 0);
8002 tree op1 = TREE_OPERAND (inner, 1);
8004 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8005 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8006 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8008 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8011 /* If the argument isn't invariant then there's nothing else we can do. */
8012 if (!TREE_CONSTANT (inner_arg0))
8013 return NULL_TREE;
8015 /* If we expect that a comparison against the argument will fold to
8016 a constant return the constant. In practice, this means a true
8017 constant or the address of a non-weak symbol. */
8018 inner = inner_arg0;
8019 STRIP_NOPS (inner);
8020 if (TREE_CODE (inner) == ADDR_EXPR)
8024 inner = TREE_OPERAND (inner, 0);
8026 while (TREE_CODE (inner) == COMPONENT_REF
8027 || TREE_CODE (inner) == ARRAY_REF);
8028 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8029 return NULL_TREE;
8032 /* Otherwise, ARG0 already has the proper type for the return value. */
8033 return arg0;
8036 /* Fold a call to __builtin_classify_type with argument ARG. */
8038 static tree
8039 fold_builtin_classify_type (tree arg)
8041 if (arg == 0)
8042 return build_int_cst (integer_type_node, no_type_class);
8044 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8047 /* Fold a call to __builtin_strlen with argument ARG. */
8049 static tree
8050 fold_builtin_strlen (location_t loc, tree type, tree arg)
8052 if (!validate_arg (arg, POINTER_TYPE))
8053 return NULL_TREE;
8054 else
8056 tree len = c_strlen (arg, 0);
8058 if (len)
8059 return fold_convert_loc (loc, type, len);
8061 return NULL_TREE;
8065 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8067 static tree
8068 fold_builtin_inf (location_t loc, tree type, int warn)
8070 REAL_VALUE_TYPE real;
8072 /* __builtin_inff is intended to be usable to define INFINITY on all
8073 targets. If an infinity is not available, INFINITY expands "to a
8074 positive constant of type float that overflows at translation
8075 time", footnote "In this case, using INFINITY will violate the
8076 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8077 Thus we pedwarn to ensure this constraint violation is
8078 diagnosed. */
8079 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8080 pedwarn (loc, 0, "target format does not support infinity");
8082 real_inf (&real);
8083 return build_real (type, real);
8086 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8087 NULL_TREE if no simplification can be made. */
8089 static tree
8090 fold_builtin_sincos (location_t loc,
8091 tree arg0, tree arg1, tree arg2)
8093 tree type;
8094 tree fndecl, call = NULL_TREE;
8096 if (!validate_arg (arg0, REAL_TYPE)
8097 || !validate_arg (arg1, POINTER_TYPE)
8098 || !validate_arg (arg2, POINTER_TYPE))
8099 return NULL_TREE;
8101 type = TREE_TYPE (arg0);
8103 /* Calculate the result when the argument is a constant. */
8104 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8105 if (fn == END_BUILTINS)
8106 return NULL_TREE;
8108 /* Canonicalize sincos to cexpi. */
8109 if (TREE_CODE (arg0) == REAL_CST)
8111 tree complex_type = build_complex_type (type);
8112 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8114 if (!call)
8116 if (!targetm.libc_has_function (function_c99_math_complex)
8117 || !builtin_decl_implicit_p (fn))
8118 return NULL_TREE;
8119 fndecl = builtin_decl_explicit (fn);
8120 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8121 call = builtin_save_expr (call);
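/* cexpi (x) = cos (x) + i*sin (x), so the imaginary part is stored through
   the sin pointer (ARG1) and the real part through the cos pointer (ARG2).  */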
8124 return build2 (COMPOUND_EXPR, void_type_node,
8125 build2 (MODIFY_EXPR, void_type_node,
8126 build_fold_indirect_ref_loc (loc, arg1),
8127 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8128 build2 (MODIFY_EXPR, void_type_node,
8129 build_fold_indirect_ref_loc (loc, arg2),
8130 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8133 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8134 Return NULL_TREE if no simplification can be made. */
8136 static tree
8137 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8139 if (!validate_arg (arg1, POINTER_TYPE)
8140 || !validate_arg (arg2, POINTER_TYPE)
8141 || !validate_arg (len, INTEGER_TYPE))
8142 return NULL_TREE;
8144 /* If the LEN parameter is zero, return zero. */
8145 if (integer_zerop (len))
8146 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8147 arg1, arg2);
8149 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8150 if (operand_equal_p (arg1, arg2, 0))
8151 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8153 /* If the len parameter is one, return an expression corresponding to
8154 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8155 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8157 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8158 tree cst_uchar_ptr_node
8159 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8161 tree ind1
8162 = fold_convert_loc (loc, integer_type_node,
8163 build1 (INDIRECT_REF, cst_uchar_node,
8164 fold_convert_loc (loc,
8165 cst_uchar_ptr_node,
8166 arg1)));
8167 tree ind2
8168 = fold_convert_loc (loc, integer_type_node,
8169 build1 (INDIRECT_REF, cst_uchar_node,
8170 fold_convert_loc (loc,
8171 cst_uchar_ptr_node,
8172 arg2)));
8173 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8176 return NULL_TREE;
8179 /* Fold a call to builtin isascii with argument ARG. */
8181 static tree
8182 fold_builtin_isascii (location_t loc, tree arg)
8184 if (!validate_arg (arg, INTEGER_TYPE))
8185 return NULL_TREE;
8186 else
8188 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8189 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8190 build_int_cst (integer_type_node,
8191 ~ (unsigned HOST_WIDE_INT) 0x7f));
8192 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8193 arg, integer_zero_node);
8197 /* Fold a call to builtin toascii with argument ARG. */
8199 static tree
8200 fold_builtin_toascii (location_t loc, tree arg)
8202 if (!validate_arg (arg, INTEGER_TYPE))
8203 return NULL_TREE;
8205 /* Transform toascii(c) -> (c & 0x7f). */
8206 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8207 build_int_cst (integer_type_node, 0x7f));
8210 /* Fold a call to builtin isdigit with argument ARG. */
8212 static tree
8213 fold_builtin_isdigit (location_t loc, tree arg)
8215 if (!validate_arg (arg, INTEGER_TYPE))
8216 return NULL_TREE;
8217 else
8219 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8220 /* According to the C standard, isdigit is unaffected by locale.
8221 However, it definitely is affected by the target character set. */
8222 unsigned HOST_WIDE_INT target_digit0
8223 = lang_hooks.to_target_charset ('0');
8225 if (target_digit0 == 0)
8226 return NULL_TREE;
8228 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8229 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8230 build_int_cst (unsigned_type_node, target_digit0));
8231 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8232 build_int_cst (unsigned_type_node, 9));
8236 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8238 static tree
8239 fold_builtin_fabs (location_t loc, tree arg, tree type)
8241 if (!validate_arg (arg, REAL_TYPE))
8242 return NULL_TREE;
8244 arg = fold_convert_loc (loc, type, arg);
8245 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8248 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8250 static tree
8251 fold_builtin_abs (location_t loc, tree arg, tree type)
8253 if (!validate_arg (arg, INTEGER_TYPE))
8254 return NULL_TREE;
8256 arg = fold_convert_loc (loc, type, arg);
8257 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8260 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8262 static tree
8263 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8265 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8266 if (validate_arg (arg0, REAL_TYPE)
8267 && validate_arg (arg1, REAL_TYPE)
8268 && validate_arg (arg2, REAL_TYPE)
8269 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8270 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8272 return NULL_TREE;
8275 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8277 static tree
8278 fold_builtin_carg (location_t loc, tree arg, tree type)
8280 if (validate_arg (arg, COMPLEX_TYPE)
8281 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8283 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8285 if (atan2_fn)
8287 tree new_arg = builtin_save_expr (arg);
8288 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8289 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8290 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8294 return NULL_TREE;
8297 /* Fold a call to builtin frexp; we can assume the base is 2. */
8299 static tree
8300 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8302 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8303 return NULL_TREE;
8305 STRIP_NOPS (arg0);
8307 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8308 return NULL_TREE;
8310 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8312 /* Proceed if a valid pointer type was passed in. */
8313 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8315 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8316 tree frac, exp;
8318 switch (value->cl)
8320 case rvc_zero:
8321 /* For +-0, return (*exp = 0, +-0). */
8322 exp = integer_zero_node;
8323 frac = arg0;
8324 break;
8325 case rvc_nan:
8326 case rvc_inf:
8327 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8328 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8329 case rvc_normal:
8331 /* Since the frexp function always expects base 2, and in
8332 GCC normalized significands are already in the range
8333 [0.5, 1.0), we have exactly what frexp wants. */
8334 REAL_VALUE_TYPE frac_rvt = *value;
8335 SET_REAL_EXP (&frac_rvt, 0);
8336 frac = build_real (rettype, frac_rvt);
8337 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8339 break;
8340 default:
8341 gcc_unreachable ();
8344 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8345 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8346 TREE_SIDE_EFFECTS (arg1) = 1;
8347 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8350 return NULL_TREE;
8353 /* Fold a call to builtin modf. */
8355 static tree
8356 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8358 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8359 return NULL_TREE;
8361 STRIP_NOPS (arg0);
8363 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8364 return NULL_TREE;
8366 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8368 /* Proceed if a valid pointer type was passed in. */
8369 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8371 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8372 REAL_VALUE_TYPE trunc, frac;
8374 switch (value->cl)
8376 case rvc_nan:
8377 case rvc_zero:
8378 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8379 trunc = frac = *value;
8380 break;
8381 case rvc_inf:
8382 /* For +-Inf, return (*arg1 = arg0, +-0). */
8383 frac = dconst0;
8384 frac.sign = value->sign;
8385 trunc = *value;
8386 break;
8387 case rvc_normal:
8388 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8389 real_trunc (&trunc, VOIDmode, value);
8390 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8391 /* If the original number was negative and already
8392 integral, then the fractional part is -0.0. */
8393 if (value->sign && frac.cl == rvc_zero)
8394 frac.sign = value->sign;
8395 break;
8398 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8399 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8400 build_real (rettype, trunc));
8401 TREE_SIDE_EFFECTS (arg1) = 1;
8402 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8403 build_real (rettype, frac));
8406 return NULL_TREE;
8409 /* Given a location LOC, an interclass builtin function decl FNDECL
8410 and its single argument ARG, return a folded expression computing
8411 the same, or NULL_TREE if we either couldn't or didn't want to fold
8412 (the latter happens if there's an RTL instruction available). */
8414 static tree
8415 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8417 machine_mode mode;
8419 if (!validate_arg (arg, REAL_TYPE))
8420 return NULL_TREE;
8422 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8423 return NULL_TREE;
8425 mode = TYPE_MODE (TREE_TYPE (arg));
8427 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8429 /* If there is no optab, try generic code. */
8430 switch (DECL_FUNCTION_CODE (fndecl))
8432 tree result;
8434 CASE_FLT_FN (BUILT_IN_ISINF):
8436 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8437 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8438 tree type = TREE_TYPE (arg);
8439 REAL_VALUE_TYPE r;
8440 char buf[128];
8442 if (is_ibm_extended)
8444 /* NaN and Inf are encoded in the high-order double value
8445 only. The low-order value is not significant. */
8446 type = double_type_node;
8447 mode = DFmode;
8448 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8450 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8451 real_from_string (&r, buf);
8452 result = build_call_expr (isgr_fn, 2,
8453 fold_build1_loc (loc, ABS_EXPR, type, arg),
8454 build_real (type, r));
8455 return result;
8457 CASE_FLT_FN (BUILT_IN_FINITE):
8458 case BUILT_IN_ISFINITE:
8460 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8461 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8462 tree type = TREE_TYPE (arg);
8463 REAL_VALUE_TYPE r;
8464 char buf[128];
8466 if (is_ibm_extended)
8468 /* NaN and Inf are encoded in the high-order double value
8469 only. The low-order value is not significant. */
8470 type = double_type_node;
8471 mode = DFmode;
8472 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8474 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8475 real_from_string (&r, buf);
8476 result = build_call_expr (isle_fn, 2,
8477 fold_build1_loc (loc, ABS_EXPR, type, arg),
8478 build_real (type, r));
8479 /*result = fold_build2_loc (loc, UNGT_EXPR,
8480 TREE_TYPE (TREE_TYPE (fndecl)),
8481 fold_build1_loc (loc, ABS_EXPR, type, arg),
8482 build_real (type, r));
8483 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8484 TREE_TYPE (TREE_TYPE (fndecl)),
8485 result);*/
8486 return result;
8488 case BUILT_IN_ISNORMAL:
8490 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8491 islessequal(fabs(x),DBL_MAX). */
8492 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8493 tree type = TREE_TYPE (arg);
8494 tree orig_arg, max_exp, min_exp;
8495 machine_mode orig_mode = mode;
8496 REAL_VALUE_TYPE rmax, rmin;
8497 char buf[128];
8499 orig_arg = arg = builtin_save_expr (arg);
8500 if (is_ibm_extended)
8502 /* Use double to test the normal range of IBM extended
8503 precision. Emin for IBM extended precision is
8504 different from emin for IEEE double, being 53 higher
8505 since the low double exponent is at least 53 lower
8506 than the high double exponent. */
8507 type = double_type_node;
8508 mode = DFmode;
8509 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8511 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8513 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8514 real_from_string (&rmax, buf);
8515 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8516 real_from_string (&rmin, buf);
8517 max_exp = build_real (type, rmax);
8518 min_exp = build_real (type, rmin);
8520 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8521 if (is_ibm_extended)
8523 /* Testing the high end of the range is done just using
8524 the high double, using the same test as isfinite().
8525 For the subnormal end of the range we first test the
8526 high double, then if its magnitude is equal to the
8527 limit of 0x1p-969, we test whether the low double is
8528 non-zero and opposite sign to the high double. */
8529 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8530 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8531 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8532 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8533 arg, min_exp);
8534 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8535 complex_double_type_node, orig_arg);
8536 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8537 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8538 tree zero = build_real (type, dconst0);
8539 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8540 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8541 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8542 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8543 fold_build3 (COND_EXPR,
8544 integer_type_node,
8545 hilt, logt, lolt));
8546 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8547 eq_min, ok_lo);
8548 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8549 gt_min, eq_min);
8551 else
8553 tree const isge_fn
8554 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8555 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8557 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8558 max_exp, min_exp);
8559 return result;
8561 default:
8562 break;
8565 return NULL_TREE;
8568 /* Fold a call to __builtin_isnan, __builtin_isinf, or __builtin_finite.
8569 ARG is the argument for the call. */
8571 static tree
8572 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8574 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8576 if (!validate_arg (arg, REAL_TYPE))
8577 return NULL_TREE;
8579 switch (builtin_index)
8581 case BUILT_IN_ISINF:
8582 if (!HONOR_INFINITIES (arg))
8583 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8585 return NULL_TREE;
8587 case BUILT_IN_ISINF_SIGN:
8589 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8590 /* In a boolean context, GCC will fold the inner COND_EXPR to
8591 1. So e.g. "if (isinf_sign(x))" would be folded to just
8592 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8593 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8594 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8595 tree tmp = NULL_TREE;
8597 arg = builtin_save_expr (arg);
8599 if (signbit_fn && isinf_fn)
8601 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8602 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8604 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8605 signbit_call, integer_zero_node);
8606 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8607 isinf_call, integer_zero_node);
8609 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8610 integer_minus_one_node, integer_one_node);
8611 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8612 isinf_call, tmp,
8613 integer_zero_node);
8616 return tmp;
8619 case BUILT_IN_ISFINITE:
8620 if (!HONOR_NANS (arg)
8621 && !HONOR_INFINITIES (arg))
8622 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8624 return NULL_TREE;
8626 case BUILT_IN_ISNAN:
8627 if (!HONOR_NANS (arg))
8628 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8631 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8632 if (is_ibm_extended)
8634 /* NaN and Inf are encoded in the high-order double value
8635 only. The low-order value is not significant. */
8636 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8639 arg = builtin_save_expr (arg);
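/* A NaN is the only value that compares unordered with itself, so
   isnan (x) is expanded as the self-comparison x UNORD x.  */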
8640 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8642 default:
8643 gcc_unreachable ();
8647 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8648 This builtin will generate code to return the appropriate floating
8649 point classification depending on the value of the floating point
8650 number passed in. The possible return values must be supplied as
8651 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8652 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8653 one floating-point argument, which is "type generic". */
8655 static tree
8656 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8658 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8659 arg, type, res, tmp;
8660 machine_mode mode;
8661 REAL_VALUE_TYPE r;
8662 char buf[128];
8664 /* Verify the required arguments in the original call. */
8665 if (nargs != 6
8666 || !validate_arg (args[0], INTEGER_TYPE)
8667 || !validate_arg (args[1], INTEGER_TYPE)
8668 || !validate_arg (args[2], INTEGER_TYPE)
8669 || !validate_arg (args[3], INTEGER_TYPE)
8670 || !validate_arg (args[4], INTEGER_TYPE)
8671 || !validate_arg (args[5], REAL_TYPE))
8672 return NULL_TREE;
8674 fp_nan = args[0];
8675 fp_infinite = args[1];
8676 fp_normal = args[2];
8677 fp_subnormal = args[3];
8678 fp_zero = args[4];
8679 arg = args[5];
8680 type = TREE_TYPE (arg);
8681 mode = TYPE_MODE (type);
8682 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8684 /* fpclassify(x) ->
8685 isnan(x) ? FP_NAN :
8686 (fabs(x) == Inf ? FP_INFINITE :
8687 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8688 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
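/* The COND_EXPRs are built innermost-first: start with the zero/subnormal
   split, then wrap the normal, infinite and NaN tests around the previous
   result.  */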
8690 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8691 build_real (type, dconst0));
8692 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8693 tmp, fp_zero, fp_subnormal);
8695 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8696 real_from_string (&r, buf);
8697 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8698 arg, build_real (type, r));
8699 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8701 if (HONOR_INFINITIES (mode))
8703 real_inf (&r);
8704 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8705 build_real (type, r));
8706 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8707 fp_infinite, res);
8710 if (HONOR_NANS (mode))
8712 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8713 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8716 return res;
8719 /* Fold a call to an unordered comparison function such as
8720 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8721 being called and ARG0 and ARG1 are the arguments for the call.
8722 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8723 the opposite of the desired result. UNORDERED_CODE is used
8724 for modes that can hold NaNs and ORDERED_CODE is used for
8725 the rest. */
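/* For instance, isgreater reaches here with UNLE_EXPR/LE_EXPR, so when NaNs
   are honored the result is built as !(ARG0 unle ARG1).  */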
8727 static tree
8728 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8729 enum tree_code unordered_code,
8730 enum tree_code ordered_code)
8732 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8733 enum tree_code code;
8734 tree type0, type1;
8735 enum tree_code code0, code1;
8736 tree cmp_type = NULL_TREE;
8738 type0 = TREE_TYPE (arg0);
8739 type1 = TREE_TYPE (arg1);
8741 code0 = TREE_CODE (type0);
8742 code1 = TREE_CODE (type1);
8744 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8745 /* Choose the wider of two real types. */
8746 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8747 ? type0 : type1;
8748 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8749 cmp_type = type0;
8750 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8751 cmp_type = type1;
8753 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8754 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8756 if (unordered_code == UNORDERED_EXPR)
8758 if (!HONOR_NANS (arg0))
8759 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8760 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8763 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8764 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8765 fold_build2_loc (loc, code, type, arg0, arg1));
8768 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8769 arithmetic if it can never overflow, or into internal functions that
8770 return both the result of the arithmetic and an overflow boolean flag in
8771 a complex integer result, or some other check for overflow.
8772 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8773 checking part of that. */
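/* In outline, __builtin_add_overflow (a, b, &r) is folded below to roughly
     tmp = .ADD_OVERFLOW (a, b); r = REALPART_EXPR <tmp>;
     result = (bool) IMAGPART_EXPR <tmp>;
   while __builtin_add_overflow_p keeps only the overflow flag.  */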
8775 static tree
8776 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8777 tree arg0, tree arg1, tree arg2)
8779 enum internal_fn ifn = IFN_LAST;
8780 /* The code of the expression corresponding to the type-generic
8781 built-in, or ERROR_MARK for the type-specific ones. */
8782 enum tree_code opcode = ERROR_MARK;
8783 bool ovf_only = false;
8785 switch (fcode)
8787 case BUILT_IN_ADD_OVERFLOW_P:
8788 ovf_only = true;
8789 /* FALLTHRU */
8790 case BUILT_IN_ADD_OVERFLOW:
8791 opcode = PLUS_EXPR;
8792 /* FALLTHRU */
8793 case BUILT_IN_SADD_OVERFLOW:
8794 case BUILT_IN_SADDL_OVERFLOW:
8795 case BUILT_IN_SADDLL_OVERFLOW:
8796 case BUILT_IN_UADD_OVERFLOW:
8797 case BUILT_IN_UADDL_OVERFLOW:
8798 case BUILT_IN_UADDLL_OVERFLOW:
8799 ifn = IFN_ADD_OVERFLOW;
8800 break;
8801 case BUILT_IN_SUB_OVERFLOW_P:
8802 ovf_only = true;
8803 /* FALLTHRU */
8804 case BUILT_IN_SUB_OVERFLOW:
8805 opcode = MINUS_EXPR;
8806 /* FALLTHRU */
8807 case BUILT_IN_SSUB_OVERFLOW:
8808 case BUILT_IN_SSUBL_OVERFLOW:
8809 case BUILT_IN_SSUBLL_OVERFLOW:
8810 case BUILT_IN_USUB_OVERFLOW:
8811 case BUILT_IN_USUBL_OVERFLOW:
8812 case BUILT_IN_USUBLL_OVERFLOW:
8813 ifn = IFN_SUB_OVERFLOW;
8814 break;
8815 case BUILT_IN_MUL_OVERFLOW_P:
8816 ovf_only = true;
8817 /* FALLTHRU */
8818 case BUILT_IN_MUL_OVERFLOW:
8819 opcode = MULT_EXPR;
8820 /* FALLTHRU */
8821 case BUILT_IN_SMUL_OVERFLOW:
8822 case BUILT_IN_SMULL_OVERFLOW:
8823 case BUILT_IN_SMULLL_OVERFLOW:
8824 case BUILT_IN_UMUL_OVERFLOW:
8825 case BUILT_IN_UMULL_OVERFLOW:
8826 case BUILT_IN_UMULLL_OVERFLOW:
8827 ifn = IFN_MUL_OVERFLOW;
8828 break;
8829 default:
8830 gcc_unreachable ();
8833 /* For the "generic" overloads, the first two arguments can have different
8834 types and the last argument determines the target type to use to check
8835 for overflow. The arguments of the other overloads all have the same
8836 type. */
8837 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8839 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8840 arguments are constant, attempt to fold the built-in call into a constant
8841 expression indicating whether or not it detected an overflow. */
8842 if (ovf_only
8843 && TREE_CODE (arg0) == INTEGER_CST
8844 && TREE_CODE (arg1) == INTEGER_CST)
8845 /* Perform the computation in the target type and check for overflow. */
8846 return omit_one_operand_loc (loc, boolean_type_node,
8847 arith_overflowed_p (opcode, type, arg0, arg1)
8848 ? boolean_true_node : boolean_false_node,
8849 arg2);
8851 tree ctype = build_complex_type (type);
8852 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8853 2, arg0, arg1);
8854 tree tgt = save_expr (call);
8855 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8856 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8857 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8859 if (ovf_only)
8860 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8862 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8863 tree store
8864 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8865 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
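/* Editor's note: an illustrative sketch, not part of the original source.
   Source-level view of the folding above, assuming a typical GCC target:

     unsigned r;
     bool o = __builtin_add_overflow (a, b, &r);

   becomes, conceptually, one call to the internal function IFN_ADD_OVERFLOW
   returning a complex integer: the REALPART is stored through &r and the
   IMAGPART, converted to bool, is the overflow flag.  The _p variants keep
   only the flag, so with constant operands

     __builtin_add_overflow_p (__INT_MAX__, 1, (int) 0)

   folds to 1 at compile time via arith_overflowed_p.  */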
8868 /* Fold a call to __builtin_FILE to a constant string. */
8870 static inline tree
8871 fold_builtin_FILE (location_t loc)
8873 if (const char *fname = LOCATION_FILE (loc))
8874 return build_string_literal (strlen (fname) + 1, fname);
8876 return build_string_literal (1, "");
8879 /* Fold a call to __builtin_FUNCTION to a constant string. */
8881 static inline tree
8882 fold_builtin_FUNCTION ()
8884 const char *name = "";
8886 if (current_function_decl)
8887 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8889 return build_string_literal (strlen (name) + 1, name);
8892 /* Fold a call to __builtin_LINE to an integer constant. */
8894 static inline tree
8895 fold_builtin_LINE (location_t loc, tree type)
8897 return build_int_cst (type, LOCATION_LINE (loc));
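/* Editor's note: an illustrative sketch, not part of the original source.
   The three folders above turn

     const char *f = __builtin_FILE ();      -> "foo.c" (a string literal)
     const char *n = __builtin_FUNCTION ();  -> the printable name of the
                                                enclosing function, or ""
     int l = __builtin_LINE ();              -> the line number as an
                                                integer constant

   using the location of the call itself, which is what makes these
   builtins usable in C++ default arguments, where the caller's location
   is substituted.  */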
8900 /* Fold a call to built-in function FNDECL with 0 arguments.
8901 This function returns NULL_TREE if no simplification was possible. */
8903 static tree
8904 fold_builtin_0 (location_t loc, tree fndecl)
8906 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8907 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8908 switch (fcode)
8910 case BUILT_IN_FILE:
8911 return fold_builtin_FILE (loc);
8913 case BUILT_IN_FUNCTION:
8914 return fold_builtin_FUNCTION ();
8916 case BUILT_IN_LINE:
8917 return fold_builtin_LINE (loc, type);
8919 CASE_FLT_FN (BUILT_IN_INF):
8920 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8921 case BUILT_IN_INFD32:
8922 case BUILT_IN_INFD64:
8923 case BUILT_IN_INFD128:
8924 return fold_builtin_inf (loc, type, true);
8926 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8927 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8928 return fold_builtin_inf (loc, type, false);
8930 case BUILT_IN_CLASSIFY_TYPE:
8931 return fold_builtin_classify_type (NULL_TREE);
8933 default:
8934 break;
8936 return NULL_TREE;
8939 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8940 This function returns NULL_TREE if no simplification was possible. */
8942 static tree
8943 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8945 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8946 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8948 if (TREE_CODE (arg0) == ERROR_MARK)
8949 return NULL_TREE;
8951 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8952 return ret;
8954 switch (fcode)
8956 case BUILT_IN_CONSTANT_P:
8958 tree val = fold_builtin_constant_p (arg0);
8960 /* Gimplification will pull the CALL_EXPR for the builtin out of
8961 an if condition. When not optimizing, we'll not CSE it back.
8962 To avoid regressions such as link errors, return false now. */
8963 if (!val && !optimize)
8964 val = integer_zero_node;
8966 return val;
8969 case BUILT_IN_CLASSIFY_TYPE:
8970 return fold_builtin_classify_type (arg0);
8972 case BUILT_IN_STRLEN:
8973 return fold_builtin_strlen (loc, type, arg0);
8975 CASE_FLT_FN (BUILT_IN_FABS):
8976 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8977 case BUILT_IN_FABSD32:
8978 case BUILT_IN_FABSD64:
8979 case BUILT_IN_FABSD128:
8980 return fold_builtin_fabs (loc, arg0, type);
8982 case BUILT_IN_ABS:
8983 case BUILT_IN_LABS:
8984 case BUILT_IN_LLABS:
8985 case BUILT_IN_IMAXABS:
8986 return fold_builtin_abs (loc, arg0, type);
8988 CASE_FLT_FN (BUILT_IN_CONJ):
8989 if (validate_arg (arg0, COMPLEX_TYPE)
8990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8991 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8992 break;
8994 CASE_FLT_FN (BUILT_IN_CREAL):
8995 if (validate_arg (arg0, COMPLEX_TYPE)
8996 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8997 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8998 break;
9000 CASE_FLT_FN (BUILT_IN_CIMAG):
9001 if (validate_arg (arg0, COMPLEX_TYPE)
9002 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9003 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9004 break;
9006 CASE_FLT_FN (BUILT_IN_CARG):
9007 return fold_builtin_carg (loc, arg0, type);
9009 case BUILT_IN_ISASCII:
9010 return fold_builtin_isascii (loc, arg0);
9012 case BUILT_IN_TOASCII:
9013 return fold_builtin_toascii (loc, arg0);
9015 case BUILT_IN_ISDIGIT:
9016 return fold_builtin_isdigit (loc, arg0);
9018 CASE_FLT_FN (BUILT_IN_FINITE):
9019 case BUILT_IN_FINITED32:
9020 case BUILT_IN_FINITED64:
9021 case BUILT_IN_FINITED128:
9022 case BUILT_IN_ISFINITE:
9024 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9025 if (ret)
9026 return ret;
9027 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9030 CASE_FLT_FN (BUILT_IN_ISINF):
9031 case BUILT_IN_ISINFD32:
9032 case BUILT_IN_ISINFD64:
9033 case BUILT_IN_ISINFD128:
9035 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9036 if (ret)
9037 return ret;
9038 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9041 case BUILT_IN_ISNORMAL:
9042 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9044 case BUILT_IN_ISINF_SIGN:
9045 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9047 CASE_FLT_FN (BUILT_IN_ISNAN):
9048 case BUILT_IN_ISNAND32:
9049 case BUILT_IN_ISNAND64:
9050 case BUILT_IN_ISNAND128:
9051 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9053 case BUILT_IN_FREE:
9054 if (integer_zerop (arg0))
9055 return build_empty_stmt (loc);
9056 break;
9058 default:
9059 break;
9062 return NULL_TREE;
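/* Editor's note: an illustrative sketch, not part of the original source.
   The BUILT_IN_CONSTANT_P case above means that in code such as

     if (__builtin_constant_p (n) && n > 64)
       do_constant_case (n);
     else
       do_generic_case (n);

   the predicate folds to 1 once N is known to be a compile-time constant.
   When it cannot (yet) be proven constant and we are not optimizing, it is
   folded to 0 right away, so the guarded branch is removed and cannot leave
   behind unresolved references (the "link error" regressions mentioned in
   the comment).  do_constant_case and do_generic_case are hypothetical
   names used only for this sketch.  */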
9066 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9067 This function returns NULL_TREE if no simplification was possible. */
9069 static tree
9070 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9072 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9073 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9075 if (TREE_CODE (arg0) == ERROR_MARK
9076 || TREE_CODE (arg1) == ERROR_MARK)
9077 return NULL_TREE;
9079 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9080 return ret;
9082 switch (fcode)
9084 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9085 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9086 if (validate_arg (arg0, REAL_TYPE)
9087 && validate_arg (arg1, POINTER_TYPE))
9088 return do_mpfr_lgamma_r (arg0, arg1, type);
9089 break;
9091 CASE_FLT_FN (BUILT_IN_FREXP):
9092 return fold_builtin_frexp (loc, arg0, arg1, type);
9094 CASE_FLT_FN (BUILT_IN_MODF):
9095 return fold_builtin_modf (loc, arg0, arg1, type);
9097 case BUILT_IN_STRSPN:
9098 return fold_builtin_strspn (loc, arg0, arg1);
9100 case BUILT_IN_STRCSPN:
9101 return fold_builtin_strcspn (loc, arg0, arg1);
9103 case BUILT_IN_STRPBRK:
9104 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9106 case BUILT_IN_EXPECT:
9107 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9109 case BUILT_IN_ISGREATER:
9110 return fold_builtin_unordered_cmp (loc, fndecl,
9111 arg0, arg1, UNLE_EXPR, LE_EXPR);
9112 case BUILT_IN_ISGREATEREQUAL:
9113 return fold_builtin_unordered_cmp (loc, fndecl,
9114 arg0, arg1, UNLT_EXPR, LT_EXPR);
9115 case BUILT_IN_ISLESS:
9116 return fold_builtin_unordered_cmp (loc, fndecl,
9117 arg0, arg1, UNGE_EXPR, GE_EXPR);
9118 case BUILT_IN_ISLESSEQUAL:
9119 return fold_builtin_unordered_cmp (loc, fndecl,
9120 arg0, arg1, UNGT_EXPR, GT_EXPR);
9121 case BUILT_IN_ISLESSGREATER:
9122 return fold_builtin_unordered_cmp (loc, fndecl,
9123 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9124 case BUILT_IN_ISUNORDERED:
9125 return fold_builtin_unordered_cmp (loc, fndecl,
9126 arg0, arg1, UNORDERED_EXPR,
9127 NOP_EXPR);
9129 /* We do the folding for va_start in the expander. */
9130 case BUILT_IN_VA_START:
9131 break;
9133 case BUILT_IN_OBJECT_SIZE:
9134 return fold_builtin_object_size (arg0, arg1);
9136 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9137 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9139 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9140 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9142 default:
9143 break;
9145 return NULL_TREE;
9148 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9149 and ARG2.
9150 This function returns NULL_TREE if no simplification was possible. */
9152 static tree
9153 fold_builtin_3 (location_t loc, tree fndecl,
9154 tree arg0, tree arg1, tree arg2)
9156 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9157 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9159 if (TREE_CODE (arg0) == ERROR_MARK
9160 || TREE_CODE (arg1) == ERROR_MARK
9161 || TREE_CODE (arg2) == ERROR_MARK)
9162 return NULL_TREE;
9164 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9165 arg0, arg1, arg2))
9166 return ret;
9168 switch (fcode)
9171 CASE_FLT_FN (BUILT_IN_SINCOS):
9172 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9174 CASE_FLT_FN (BUILT_IN_FMA):
9175 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9176 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9178 CASE_FLT_FN (BUILT_IN_REMQUO):
9179 if (validate_arg (arg0, REAL_TYPE)
9180 && validate_arg (arg1, REAL_TYPE)
9181 && validate_arg (arg2, POINTER_TYPE))
9182 return do_mpfr_remquo (arg0, arg1, arg2);
9183 break;
9185 case BUILT_IN_MEMCMP:
9186 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9188 case BUILT_IN_EXPECT:
9189 return fold_builtin_expect (loc, arg0, arg1, arg2);
9191 case BUILT_IN_ADD_OVERFLOW:
9192 case BUILT_IN_SUB_OVERFLOW:
9193 case BUILT_IN_MUL_OVERFLOW:
9194 case BUILT_IN_ADD_OVERFLOW_P:
9195 case BUILT_IN_SUB_OVERFLOW_P:
9196 case BUILT_IN_MUL_OVERFLOW_P:
9197 case BUILT_IN_SADD_OVERFLOW:
9198 case BUILT_IN_SADDL_OVERFLOW:
9199 case BUILT_IN_SADDLL_OVERFLOW:
9200 case BUILT_IN_SSUB_OVERFLOW:
9201 case BUILT_IN_SSUBL_OVERFLOW:
9202 case BUILT_IN_SSUBLL_OVERFLOW:
9203 case BUILT_IN_SMUL_OVERFLOW:
9204 case BUILT_IN_SMULL_OVERFLOW:
9205 case BUILT_IN_SMULLL_OVERFLOW:
9206 case BUILT_IN_UADD_OVERFLOW:
9207 case BUILT_IN_UADDL_OVERFLOW:
9208 case BUILT_IN_UADDLL_OVERFLOW:
9209 case BUILT_IN_USUB_OVERFLOW:
9210 case BUILT_IN_USUBL_OVERFLOW:
9211 case BUILT_IN_USUBLL_OVERFLOW:
9212 case BUILT_IN_UMUL_OVERFLOW:
9213 case BUILT_IN_UMULL_OVERFLOW:
9214 case BUILT_IN_UMULLL_OVERFLOW:
9215 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9217 default:
9218 break;
9220 return NULL_TREE;
9223 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9224 arguments. IGNORE is true if the result of the
9225 function call is ignored. This function returns NULL_TREE if no
9226 simplification was possible. */
9228 tree
9229 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9231 tree ret = NULL_TREE;
9233 switch (nargs)
9235 case 0:
9236 ret = fold_builtin_0 (loc, fndecl);
9237 break;
9238 case 1:
9239 ret = fold_builtin_1 (loc, fndecl, args[0]);
9240 break;
9241 case 2:
9242 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9243 break;
9244 case 3:
9245 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9246 break;
9247 default:
9248 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9249 break;
9251 if (ret)
9253 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9254 SET_EXPR_LOCATION (ret, loc);
9255 TREE_NO_WARNING (ret) = 1;
9256 return ret;
9258 return NULL_TREE;
9261 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9262 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9263 of arguments in ARGS to be omitted. OLDNARGS is the number of
9264 elements in ARGS. */
9266 static tree
9267 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9268 int skip, tree fndecl, int n, va_list newargs)
9270 int nargs = oldnargs - skip + n;
9271 tree *buffer;
9273 if (n > 0)
9275 int i, j;
9277 buffer = XALLOCAVEC (tree, nargs);
9278 for (i = 0; i < n; i++)
9279 buffer[i] = va_arg (newargs, tree);
9280 for (j = skip; j < oldnargs; j++, i++)
9281 buffer[i] = args[j];
9283 else
9284 buffer = args + skip;
9286 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9289 /* Return true if FNDECL shouldn't be folded right now.
9290 If a built-in function has an inline attribute always_inline
9291 wrapper, defer folding it until after always_inline functions have
9292 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9293 might not be performed. */
9295 bool
9296 avoid_folding_inline_builtin (tree fndecl)
9298 return (DECL_DECLARED_INLINE_P (fndecl)
9299 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9300 && cfun
9301 && !cfun->always_inline_functions_inlined
9302 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9305 /* A wrapper function for builtin folding that prevents warnings for
9306 "statement without effect" and the like, caused by removing the
9307 call node earlier than the warning is generated. */
9309 tree
9310 fold_call_expr (location_t loc, tree exp, bool ignore)
9312 tree ret = NULL_TREE;
9313 tree fndecl = get_callee_fndecl (exp);
9314 if (fndecl
9315 && TREE_CODE (fndecl) == FUNCTION_DECL
9316 && DECL_BUILT_IN (fndecl)
9317 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9318 yet. Defer folding until we see all the arguments
9319 (after inlining). */
9320 && !CALL_EXPR_VA_ARG_PACK (exp))
9322 int nargs = call_expr_nargs (exp);
9324 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9325 instead last argument is __builtin_va_arg_pack (). Defer folding
9326 even in that case, until arguments are finalized. */
9327 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9329 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9330 if (fndecl2
9331 && TREE_CODE (fndecl2) == FUNCTION_DECL
9332 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9333 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9334 return NULL_TREE;
9337 if (avoid_folding_inline_builtin (fndecl))
9338 return NULL_TREE;
9340 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9341 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9342 CALL_EXPR_ARGP (exp), ignore);
9343 else
9345 tree *args = CALL_EXPR_ARGP (exp);
9346 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9347 if (ret)
9348 return ret;
9351 return NULL_TREE;
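/* Editor's note: an illustrative sketch, not part of the original source,
   of why folding is deferred when the last argument is
   __builtin_va_arg_pack ().  This is the wrapper idiom from the GCC manual:

     #include <stdio.h>

     extern int my_fprintf (FILE *, const char *, ...);
     extern __inline __attribute__((always_inline, gnu_inline)) int
     my_fprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   The fprintf call inside the wrapper cannot be usefully folded until the
   wrapper has been inlined and the argument pack replaced by the caller's
   actual arguments; the same reasoning is behind
   avoid_folding_inline_builtin for the always_inline -D_FORTIFY_SOURCE
   wrappers.  */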
9354 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9355 N arguments are passed in the array ARGARRAY. Return a folded
9356 expression or NULL_TREE if no simplification was possible. */
9358 tree
9359 fold_builtin_call_array (location_t loc, tree,
9360 tree fn,
9361 int n,
9362 tree *argarray)
9364 if (TREE_CODE (fn) != ADDR_EXPR)
9365 return NULL_TREE;
9367 tree fndecl = TREE_OPERAND (fn, 0);
9368 if (TREE_CODE (fndecl) == FUNCTION_DECL
9369 && DECL_BUILT_IN (fndecl))
9371 /* If last argument is __builtin_va_arg_pack (), arguments to this
9372 function are not finalized yet. Defer folding until they are. */
9373 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9375 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9376 if (fndecl2
9377 && TREE_CODE (fndecl2) == FUNCTION_DECL
9378 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9379 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9380 return NULL_TREE;
9382 if (avoid_folding_inline_builtin (fndecl))
9383 return NULL_TREE;
9384 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9385 return targetm.fold_builtin (fndecl, n, argarray, false);
9386 else
9387 return fold_builtin_n (loc, fndecl, argarray, n, false);
9390 return NULL_TREE;
9393 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9394 along with N new arguments specified as the "..." parameters. SKIP
9395 is the number of arguments in EXP to be omitted. This function is used
9396 to do varargs-to-varargs transformations. */
9398 static tree
9399 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9401 va_list ap;
9402 tree t;
9404 va_start (ap, n);
9405 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9406 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9407 va_end (ap);
9409 return t;
9412 /* Validate a single argument ARG against a tree code CODE representing
9413 a type. Return true when argument is valid. */
9415 static bool
9416 validate_arg (const_tree arg, enum tree_code code)
9418 if (!arg)
9419 return false;
9420 else if (code == POINTER_TYPE)
9421 return POINTER_TYPE_P (TREE_TYPE (arg));
9422 else if (code == INTEGER_TYPE)
9423 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9424 return code == TREE_CODE (TREE_TYPE (arg));
9427 /* This function validates the types of a function call argument list
9428 against a specified list of tree_codes. If the last specifier is a 0,
9429 that represents an ellipsis, otherwise the last specifier must be a
9430 VOID_TYPE.
9432 This is the GIMPLE version of validate_arglist. Eventually we want to
9433 completely convert builtins.c to work from GIMPLEs and the tree based
9434 validate_arglist will then be removed. */
9436 bool
9437 validate_gimple_arglist (const gcall *call, ...)
9439 enum tree_code code;
9440 bool res = false;
9441 va_list ap;
9442 const_tree arg;
9443 size_t i;
9445 va_start (ap, call);
9446 i = 0;
9450 code = (enum tree_code) va_arg (ap, int);
9451 switch (code)
9453 case 0:
9454 /* This signifies an ellipsis; any further arguments are all OK. */
9455 res = true;
9456 goto end;
9457 case VOID_TYPE:
9458 /* This signifies an endlink, if no arguments remain, return
9459 true, otherwise return false. */
9460 res = (i == gimple_call_num_args (call));
9461 goto end;
9462 default:
9463 /* If no parameters remain or the parameter's code does not
9464 match the specified code, return false. Otherwise continue
9465 checking any remaining arguments. */
9466 arg = gimple_call_arg (call, i++);
9467 if (!validate_arg (arg, code))
9468 goto end;
9469 break;
9472 while (1);
9474 /* We need gotos here since we can only have one VA_CLOSE in a
9475 function. */
9476 end: ;
9477 va_end (ap);
9479 return res;
9482 /* Default target-specific builtin expander that does nothing. */
9485 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9486 rtx target ATTRIBUTE_UNUSED,
9487 rtx subtarget ATTRIBUTE_UNUSED,
9488 machine_mode mode ATTRIBUTE_UNUSED,
9489 int ignore ATTRIBUTE_UNUSED)
9491 return NULL_RTX;
9494 /* Returns true if EXP represents data that would potentially reside
9495 in a readonly section. */
9497 bool
9498 readonly_data_expr (tree exp)
9500 STRIP_NOPS (exp);
9502 if (TREE_CODE (exp) != ADDR_EXPR)
9503 return false;
9505 exp = get_base_address (TREE_OPERAND (exp, 0));
9506 if (!exp)
9507 return false;
9509 /* Make sure we call decl_readonly_section only for trees it
9510 can handle (since it returns true for everything it doesn't
9511 understand). */
9512 if (TREE_CODE (exp) == STRING_CST
9513 || TREE_CODE (exp) == CONSTRUCTOR
9514 || (VAR_P (exp) && TREE_STATIC (exp)))
9515 return decl_readonly_section (exp, 0);
9516 else
9517 return false;
9520 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9521 to the call, and TYPE is its return type.
9523 Return NULL_TREE if no simplification was possible, otherwise return the
9524 simplified form of the call as a tree.
9526 The simplified form may be a constant or other expression which
9527 computes the same value, but in a more efficient manner (including
9528 calls to other builtin functions).
9530 The call may contain arguments which need to be evaluated, but
9531 which are not useful to determine the result of the call. In
9532 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9533 COMPOUND_EXPR will be an argument which must be evaluated.
9534 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9535 COMPOUND_EXPR in the chain will contain the tree for the simplified
9536 form of the builtin function call. */
9538 static tree
9539 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9541 if (!validate_arg (s1, POINTER_TYPE)
9542 || !validate_arg (s2, POINTER_TYPE))
9543 return NULL_TREE;
9544 else
9546 tree fn;
9547 const char *p1, *p2;
9549 p2 = c_getstr (s2);
9550 if (p2 == NULL)
9551 return NULL_TREE;
9553 p1 = c_getstr (s1);
9554 if (p1 != NULL)
9556 const char *r = strpbrk (p1, p2);
9557 tree tem;
9559 if (r == NULL)
9560 return build_int_cst (TREE_TYPE (s1), 0);
9562 /* Return an offset into the constant string argument. */
9563 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9564 return fold_convert_loc (loc, type, tem);
9567 if (p2[0] == '\0')
9568 /* strpbrk(x, "") == NULL.
9569 Evaluate and ignore s1 in case it had side-effects. */
9570 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9572 if (p2[1] != '\0')
9573 return NULL_TREE; /* Really call strpbrk. */
9575 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9576 if (!fn)
9577 return NULL_TREE;
9579 /* New argument list transforming strpbrk(s1, s2) to
9580 strchr(s1, s2[0]). */
9581 return build_call_expr_loc (loc, fn, 2, s1,
9582 build_int_cst (integer_type_node, p2[0]));
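/* Editor's note: an illustrative sketch, not part of the original source.
   The strpbrk folding above, seen from the source level:

     strpbrk (s, "")         -> (char *) 0, with s still evaluated for
                                its side effects
     strpbrk (s, "l")        -> strchr (s, 'l')
     strpbrk ("hello", "lo") -> a constant offset into the literal,
                                here "hello" + 2

   Any other form is left alone and calls the library strpbrk.  */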
9586 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9587 to the call.
9589 Return NULL_TREE if no simplification was possible, otherwise return the
9590 simplified form of the call as a tree.
9592 The simplified form may be a constant or other expression which
9593 computes the same value, but in a more efficient manner (including
9594 calls to other builtin functions).
9596 The call may contain arguments which need to be evaluated, but
9597 which are not useful to determine the result of the call. In
9598 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9599 COMPOUND_EXPR will be an argument which must be evaluated.
9600 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9601 COMPOUND_EXPR in the chain will contain the tree for the simplified
9602 form of the builtin function call. */
9604 static tree
9605 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9607 if (!validate_arg (s1, POINTER_TYPE)
9608 || !validate_arg (s2, POINTER_TYPE))
9609 return NULL_TREE;
9610 else
9612 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9614 /* If either argument is "", return NULL_TREE. */
9615 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9616 /* Evaluate and ignore both arguments in case either one has
9617 side-effects. */
9618 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9619 s1, s2);
9620 return NULL_TREE;
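/* Editor's note: an illustrative sketch, not part of the original source.
   The only strspn simplification performed here is the empty-string case:

     strspn (s, "")  -> 0
     strspn ("", s)  -> 0

   with both arguments still evaluated for side effects; everything else
   is left to the library call or to later folding.  */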
9624 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9625 to the call.
9627 Return NULL_TREE if no simplification was possible, otherwise return the
9628 simplified form of the call as a tree.
9630 The simplified form may be a constant or other expression which
9631 computes the same value, but in a more efficient manner (including
9632 calls to other builtin functions).
9634 The call may contain arguments which need to be evaluated, but
9635 which are not useful to determine the result of the call. In
9636 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9637 COMPOUND_EXPR will be an argument which must be evaluated.
9638 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9639 COMPOUND_EXPR in the chain will contain the tree for the simplified
9640 form of the builtin function call. */
9642 static tree
9643 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9645 if (!validate_arg (s1, POINTER_TYPE)
9646 || !validate_arg (s2, POINTER_TYPE))
9647 return NULL_TREE;
9648 else
9650 /* If the first argument is "", return NULL_TREE. */
9651 const char *p1 = c_getstr (s1);
9652 if (p1 && *p1 == '\0')
9654 /* Evaluate and ignore argument s2 in case it has
9655 side-effects. */
9656 return omit_one_operand_loc (loc, size_type_node,
9657 size_zero_node, s2);
9660 /* If the second argument is "", return __builtin_strlen(s1). */
9661 const char *p2 = c_getstr (s2);
9662 if (p2 && *p2 == '\0')
9664 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9666 /* If the replacement _DECL isn't initialized, don't do the
9667 transformation. */
9668 if (!fn)
9669 return NULL_TREE;
9671 return build_call_expr_loc (loc, fn, 1, s1);
9673 return NULL_TREE;
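/* Editor's note: an illustrative sketch, not part of the original source.
   The strcspn folding above:

     strcspn ("", s)  -> 0, with s still evaluated for side effects
     strcspn (s, "")  -> strlen (s), provided the strlen decl is available

   Other forms are left as real strcspn calls.  */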
9677 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9678 produced, false otherwise. This is done so that we don't output the error
9679 or warning two or three times. */
9681 bool
9682 fold_builtin_next_arg (tree exp, bool va_start_p)
9684 tree fntype = TREE_TYPE (current_function_decl);
9685 int nargs = call_expr_nargs (exp);
9686 tree arg;
9687 /* There is a good chance the current input_location points inside the
9688 definition of the va_start macro (perhaps on the token for
9689 builtin) in a system header, so warnings will not be emitted.
9690 Use the location in real source code. */
9691 source_location current_location =
9692 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9693 NULL);
9695 if (!stdarg_p (fntype))
9697 error ("%<va_start%> used in function with fixed args");
9698 return true;
9701 if (va_start_p)
9703 if (va_start_p && (nargs != 2))
9705 error ("wrong number of arguments to function %<va_start%>");
9706 return true;
9708 arg = CALL_EXPR_ARG (exp, 1);
9710 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9711 when we checked the arguments and if needed issued a warning. */
9712 else
9714 if (nargs == 0)
9716 /* Evidently an out of date version of <stdarg.h>; can't validate
9717 va_start's second argument, but can still work as intended. */
9718 warning_at (current_location,
9719 OPT_Wvarargs,
9720 "%<__builtin_next_arg%> called without an argument");
9721 return true;
9723 else if (nargs > 1)
9725 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9726 return true;
9728 arg = CALL_EXPR_ARG (exp, 0);
9731 if (TREE_CODE (arg) == SSA_NAME)
9732 arg = SSA_NAME_VAR (arg);
9734 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9735 or __builtin_next_arg (0) the first time we see it, after checking
9736 the arguments and if needed issuing a warning. */
9737 if (!integer_zerop (arg))
9739 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9741 /* Strip off all nops for the sake of the comparison. This
9742 is not quite the same as STRIP_NOPS. It does more.
9743 We must also strip off INDIRECT_EXPR for C++ reference
9744 parameters. */
9745 while (CONVERT_EXPR_P (arg)
9746 || TREE_CODE (arg) == INDIRECT_REF)
9747 arg = TREE_OPERAND (arg, 0);
9748 if (arg != last_parm)
9750 /* FIXME: Sometimes with the tree optimizers we can end up with
9751 something other than the last argument even though the user used
9752 the last argument. We just warn and set the arg to be the last
9753 argument so that we will get wrong-code because of
9754 it. */
9755 warning_at (current_location,
9756 OPT_Wvarargs,
9757 "second parameter of %<va_start%> not last named argument");
9760 /* Undefined by C99 7.15.1.4p4 (va_start):
9761 "If the parameter parmN is declared with the register storage
9762 class, with a function or array type, or with a type that is
9763 not compatible with the type that results after application of
9764 the default argument promotions, the behavior is undefined."
9766 else if (DECL_REGISTER (arg))
9768 warning_at (current_location,
9769 OPT_Wvarargs,
9770 "undefined behavior when second parameter of "
9771 "%<va_start%> is declared with %<register%> storage");
9774 /* We want to verify the second parameter just once before the tree
9775 optimizers are run and then avoid keeping it in the tree,
9776 as otherwise we could warn even for correct code like:
9777 void foo (int i, ...)
9778 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9779 if (va_start_p)
9780 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9781 else
9782 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9784 return false;
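/* Editor's note: an illustrative sketch, not part of the original source,
   of the diagnostics the checks above produce:

     #include <stdarg.h>

     void ok (int count, ...)
     {
       va_list ap;
       va_start (ap, count);   (no diagnostic: count is the last
       va_end (ap);             named parameter)
     }

     void bad (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);       (-Wvarargs: second parameter of va_start
       va_end (ap);             not last named argument)
     }

   After checking, the second argument is replaced by 0 so that later
   copies of the call made by the optimizers do not warn again.  */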
9788 /* Expand a call EXP to __builtin_object_size. */
9790 static rtx
9791 expand_builtin_object_size (tree exp)
9793 tree ost;
9794 int object_size_type;
9795 tree fndecl = get_callee_fndecl (exp);
9797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9799 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9800 exp, fndecl);
9801 expand_builtin_trap ();
9802 return const0_rtx;
9805 ost = CALL_EXPR_ARG (exp, 1);
9806 STRIP_NOPS (ost);
9808 if (TREE_CODE (ost) != INTEGER_CST
9809 || tree_int_cst_sgn (ost) < 0
9810 || compare_tree_int (ost, 3) > 0)
9812 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9813 exp, fndecl);
9814 expand_builtin_trap ();
9815 return const0_rtx;
9818 object_size_type = tree_to_shwi (ost);
9820 return object_size_type < 2 ? constm1_rtx : const0_rtx;
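/* Editor's note: an illustrative sketch, not part of the original source.
   If a __builtin_object_size call survives to RTL expansion unfolded
   (i.e. the object size is still unknown), the defaults emitted above are
   the documented "unknown" values:

     __builtin_object_size (p, 0)  -> (size_t) -1
     __builtin_object_size (p, 1)  -> (size_t) -1
     __builtin_object_size (p, 2)  -> (size_t) 0
     __builtin_object_size (p, 3)  -> (size_t) 0

   Known cases, such as

     char buf[16];
     ... __builtin_object_size (buf, 0) ...   -> 16

   are normally folded much earlier, by fold_builtin_object_size below or
   by the objsz pass.  */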
9823 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9824 FCODE is the BUILT_IN_* to use.
9825 Return NULL_RTX if we failed; the caller should emit a normal call,
9826 otherwise try to get the result in TARGET, if convenient (and in
9827 mode MODE if that's convenient). */
9829 static rtx
9830 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9831 enum built_in_function fcode)
9833 if (!validate_arglist (exp,
9834 POINTER_TYPE,
9835 fcode == BUILT_IN_MEMSET_CHK
9836 ? INTEGER_TYPE : POINTER_TYPE,
9837 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9838 return NULL_RTX;
9840 tree dest = CALL_EXPR_ARG (exp, 0);
9841 tree src = CALL_EXPR_ARG (exp, 1);
9842 tree len = CALL_EXPR_ARG (exp, 2);
9843 tree size = CALL_EXPR_ARG (exp, 3);
9845 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9846 /*str=*/NULL_TREE, size);
9848 if (!tree_fits_uhwi_p (size))
9849 return NULL_RTX;
9851 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9853 /* Avoid transforming the checking call to an ordinary one when
9854 an overflow has been detected or when the call couldn't be
9855 validated because the size is not constant. */
9856 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9857 return NULL_RTX;
9859 tree fn = NULL_TREE;
9860 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9861 mem{cpy,pcpy,move,set} is available. */
9862 switch (fcode)
9864 case BUILT_IN_MEMCPY_CHK:
9865 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9866 break;
9867 case BUILT_IN_MEMPCPY_CHK:
9868 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9869 break;
9870 case BUILT_IN_MEMMOVE_CHK:
9871 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9872 break;
9873 case BUILT_IN_MEMSET_CHK:
9874 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9875 break;
9876 default:
9877 break;
9880 if (! fn)
9881 return NULL_RTX;
9883 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9884 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9885 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9886 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9888 else if (fcode == BUILT_IN_MEMSET_CHK)
9889 return NULL_RTX;
9890 else
9892 unsigned int dest_align = get_pointer_alignment (dest);
9894 /* If DEST is not a pointer type, call the normal function. */
9895 if (dest_align == 0)
9896 return NULL_RTX;
9898 /* If SRC and DEST are the same (and not volatile), do nothing. */
9899 if (operand_equal_p (src, dest, 0))
9901 tree expr;
9903 if (fcode != BUILT_IN_MEMPCPY_CHK)
9905 /* Evaluate and ignore LEN in case it has side-effects. */
9906 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9907 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9910 expr = fold_build_pointer_plus (dest, len);
9911 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9914 /* __memmove_chk special case. */
9915 if (fcode == BUILT_IN_MEMMOVE_CHK)
9917 unsigned int src_align = get_pointer_alignment (src);
9919 if (src_align == 0)
9920 return NULL_RTX;
9922 /* If src is categorized for a readonly section we can use
9923 normal __memcpy_chk. */
9924 if (readonly_data_expr (src))
9926 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9927 if (!fn)
9928 return NULL_RTX;
9929 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9930 dest, src, len, size);
9931 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9932 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9933 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9936 return NULL_RTX;
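/* Editor's note: an illustrative sketch, not part of the original source.
   The effect of the transformation above on _FORTIFY_SOURCE-style calls:

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0))
       -> memcpy (buf, src, 4)     length provably fits, drop the check
     __builtin___memcpy_chk (buf, src, n, 8)
       -> __memcpy_chk (...)       length not constant, keep the runtime
                                   check (NULL_RTX makes the caller emit
                                   the normal library call)
     __builtin___memmove_chk (buf, "abc", n, 8)
       -> __memcpy_chk (...)       source is read-only data, so memcpy
                                   semantics are safe

   A detected overflow (constant length larger than the destination size)
   is diagnosed by check_access and the checking call is kept.  */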
9940 /* Emit warning if a buffer overflow is detected at compile time. */
9942 static void
9943 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9945 /* The source string. */
9946 tree srcstr = NULL_TREE;
9947 /* The size of the destination object. */
9948 tree objsize = NULL_TREE;
9949 /* The string that is being concatenated with (as in __strcat_chk)
9950 or null if it isn't. */
9951 tree catstr = NULL_TREE;
9952 /* The maximum length of the source sequence in a bounded operation
9953 (such as __strncat_chk) or null if the operation isn't bounded
9954 (such as __strcat_chk). */
9955 tree maxread = NULL_TREE;
9956 /* The exact size of the access (such as in __strncpy_chk). */
9957 tree size = NULL_TREE;
9959 switch (fcode)
9961 case BUILT_IN_STRCPY_CHK:
9962 case BUILT_IN_STPCPY_CHK:
9963 srcstr = CALL_EXPR_ARG (exp, 1);
9964 objsize = CALL_EXPR_ARG (exp, 2);
9965 break;
9967 case BUILT_IN_STRCAT_CHK:
9968 /* For __strcat_chk the warning will be emitted only if overflowing
9969 by at least strlen (dest) + 1 bytes. */
9970 catstr = CALL_EXPR_ARG (exp, 0);
9971 srcstr = CALL_EXPR_ARG (exp, 1);
9972 objsize = CALL_EXPR_ARG (exp, 2);
9973 break;
9975 case BUILT_IN_STRNCAT_CHK:
9976 catstr = CALL_EXPR_ARG (exp, 0);
9977 srcstr = CALL_EXPR_ARG (exp, 1);
9978 maxread = CALL_EXPR_ARG (exp, 2);
9979 objsize = CALL_EXPR_ARG (exp, 3);
9980 break;
9982 case BUILT_IN_STRNCPY_CHK:
9983 case BUILT_IN_STPNCPY_CHK:
9984 srcstr = CALL_EXPR_ARG (exp, 1);
9985 size = CALL_EXPR_ARG (exp, 2);
9986 objsize = CALL_EXPR_ARG (exp, 3);
9987 break;
9989 case BUILT_IN_SNPRINTF_CHK:
9990 case BUILT_IN_VSNPRINTF_CHK:
9991 maxread = CALL_EXPR_ARG (exp, 1);
9992 objsize = CALL_EXPR_ARG (exp, 3);
9993 break;
9994 default:
9995 gcc_unreachable ();
9998 if (catstr && maxread)
10000 /* Check __strncat_chk. There is no way to determine the length
10001 of the string to which the source string is being appended so
10002 just warn when the length of the source string is not known. */
10003 check_strncat_sizes (exp, objsize);
10004 return;
10007 /* The destination argument is the first one for all built-ins above. */
10008 tree dst = CALL_EXPR_ARG (exp, 0);
10010 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10013 /* Emit warning if a buffer overflow is detected at compile time
10014 in __sprintf_chk/__vsprintf_chk calls. */
10016 static void
10017 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10019 tree size, len, fmt;
10020 const char *fmt_str;
10021 int nargs = call_expr_nargs (exp);
10023 /* Verify the required arguments in the original call. */
10025 if (nargs < 4)
10026 return;
10027 size = CALL_EXPR_ARG (exp, 2);
10028 fmt = CALL_EXPR_ARG (exp, 3);
10030 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10031 return;
10033 /* Check whether the format is a literal string constant. */
10034 fmt_str = c_getstr (fmt);
10035 if (fmt_str == NULL)
10036 return;
10038 if (!init_target_chars ())
10039 return;
10041 /* If the format doesn't contain % args or %%, we know its size. */
10042 if (strchr (fmt_str, target_percent) == 0)
10043 len = build_int_cstu (size_type_node, strlen (fmt_str));
10044 /* If the format is "%s" and first ... argument is a string literal,
10045 we know it too. */
10046 else if (fcode == BUILT_IN_SPRINTF_CHK
10047 && strcmp (fmt_str, target_percent_s) == 0)
10049 tree arg;
10051 if (nargs < 5)
10052 return;
10053 arg = CALL_EXPR_ARG (exp, 4);
10054 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10055 return;
10057 len = c_strlen (arg, 1);
10058 if (!len || ! tree_fits_uhwi_p (len))
10059 return;
10061 else
10062 return;
10064 /* Add one for the terminating nul. */
10065 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10067 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10068 /*maxread=*/NULL_TREE, len, size);
10071 /* Emit warning if a free is called with address of a variable. */
10073 static void
10074 maybe_emit_free_warning (tree exp)
10076 tree arg = CALL_EXPR_ARG (exp, 0);
10078 STRIP_NOPS (arg);
10079 if (TREE_CODE (arg) != ADDR_EXPR)
10080 return;
10082 arg = get_base_address (TREE_OPERAND (arg, 0));
10083 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10084 return;
10086 if (SSA_VAR_P (arg))
10087 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10088 "%Kattempt to free a non-heap object %qD", exp, arg);
10089 else
10090 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10091 "%Kattempt to free a non-heap object", exp);
10094 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10095 if possible. */
10097 static tree
10098 fold_builtin_object_size (tree ptr, tree ost)
10100 unsigned HOST_WIDE_INT bytes;
10101 int object_size_type;
10103 if (!validate_arg (ptr, POINTER_TYPE)
10104 || !validate_arg (ost, INTEGER_TYPE))
10105 return NULL_TREE;
10107 STRIP_NOPS (ost);
10109 if (TREE_CODE (ost) != INTEGER_CST
10110 || tree_int_cst_sgn (ost) < 0
10111 || compare_tree_int (ost, 3) > 0)
10112 return NULL_TREE;
10114 object_size_type = tree_to_shwi (ost);
10116 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10117 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10118 and (size_t) 0 for types 2 and 3. */
10119 if (TREE_SIDE_EFFECTS (ptr))
10120 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10122 if (TREE_CODE (ptr) == ADDR_EXPR)
10124 compute_builtin_object_size (ptr, object_size_type, &bytes);
10125 if (wi::fits_to_tree_p (bytes, size_type_node))
10126 return build_int_cstu (size_type_node, bytes);
10128 else if (TREE_CODE (ptr) == SSA_NAME)
10130 /* If object size is not known yet, delay folding until
10131 later. Maybe subsequent passes will help determining
10132 it. */
10133 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10134 && wi::fits_to_tree_p (bytes, size_type_node))
10135 return build_int_cstu (size_type_node, bytes);
10138 return NULL_TREE;
10141 /* Builtins with folding operations that operate on "..." arguments
10142 need special handling; we need to store the arguments in a convenient
10143 data structure before attempting any folding. Fortunately there are
10144 only a few builtins that fall into this category. FNDECL is the
10145 function, EXP is the CALL_EXPR for the call. */
10147 static tree
10148 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10150 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10151 tree ret = NULL_TREE;
10153 switch (fcode)
10155 case BUILT_IN_FPCLASSIFY:
10156 ret = fold_builtin_fpclassify (loc, args, nargs);
10157 break;
10159 default:
10160 break;
10162 if (ret)
10164 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10165 SET_EXPR_LOCATION (ret, loc);
10166 TREE_NO_WARNING (ret) = 1;
10167 return ret;
10169 return NULL_TREE;
10172 /* Initialize format string characters in the target charset. */
10174 bool
10175 init_target_chars (void)
10177 static bool init;
10178 if (!init)
10180 target_newline = lang_hooks.to_target_charset ('\n');
10181 target_percent = lang_hooks.to_target_charset ('%');
10182 target_c = lang_hooks.to_target_charset ('c');
10183 target_s = lang_hooks.to_target_charset ('s');
10184 if (target_newline == 0 || target_percent == 0 || target_c == 0
10185 || target_s == 0)
10186 return false;
10188 target_percent_c[0] = target_percent;
10189 target_percent_c[1] = target_c;
10190 target_percent_c[2] = '\0';
10192 target_percent_s[0] = target_percent;
10193 target_percent_s[1] = target_s;
10194 target_percent_s[2] = '\0';
10196 target_percent_s_newline[0] = target_percent;
10197 target_percent_s_newline[1] = target_s;
10198 target_percent_s_newline[2] = target_newline;
10199 target_percent_s_newline[3] = '\0';
10201 init = true;
10203 return true;
10206 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10207 and no overflow/underflow occurred. INEXACT is true if M was not
10208 exactly calculated. TYPE is the tree type for the result. This
10209 function assumes that you cleared the MPFR flags and then
10210 calculated M to see if anything subsequently set a flag prior to
10211 entering this function. Return NULL_TREE if any checks fail. */
10213 static tree
10214 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10216 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10217 overflow/underflow occurred. If -frounding-math, proceed iff the
10218 result of calling FUNC was exact. */
10219 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10220 && (!flag_rounding_math || !inexact))
10222 REAL_VALUE_TYPE rr;
10224 real_from_mpfr (&rr, m, type, GMP_RNDN);
10225 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10226 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10227 but the mpfr_t is not, then we underflowed in the
10228 conversion. */
10229 if (real_isfinite (&rr)
10230 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10232 REAL_VALUE_TYPE rmode;
10234 real_convert (&rmode, TYPE_MODE (type), &rr);
10235 /* Proceed iff the specified mode can hold the value. */
10236 if (real_identical (&rmode, &rr))
10237 return build_real (type, rmode);
10240 return NULL_TREE;
10243 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10244 number and no overflow/underflow occurred. INEXACT is true if M
10245 was not exactly calculated. TYPE is the tree type for the result.
10246 This function assumes that you cleared the MPFR flags and then
10247 calculated M to see if anything subsequently set a flag prior to
10248 entering this function. Return NULL_TREE if any checks fail, if
10249 FORCE_CONVERT is true, then bypass the checks. */
10251 static tree
10252 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10254 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10255 overflow/underflow occurred. If -frounding-math, proceed iff the
10256 result of calling FUNC was exact. */
10257 if (force_convert
10258 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10259 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10260 && (!flag_rounding_math || !inexact)))
10262 REAL_VALUE_TYPE re, im;
10264 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10265 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10266 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10267 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10268 but the mpfr_t is not, then we underflowed in the
10269 conversion. */
10270 if (force_convert
10271 || (real_isfinite (&re) && real_isfinite (&im)
10272 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10273 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10275 REAL_VALUE_TYPE re_mode, im_mode;
10277 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10278 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10279 /* Proceed iff the specified mode can hold the value. */
10280 if (force_convert
10281 || (real_identical (&re_mode, &re)
10282 && real_identical (&im_mode, &im)))
10283 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10284 build_real (TREE_TYPE (type), im_mode));
10287 return NULL_TREE;
10290 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10291 the pointer *(ARG_QUO) and return the result. The type is taken
10292 from the type of ARG0 and is used for setting the precision of the
10293 calculation and results. */
10295 static tree
10296 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10298 tree const type = TREE_TYPE (arg0);
10299 tree result = NULL_TREE;
10301 STRIP_NOPS (arg0);
10302 STRIP_NOPS (arg1);
10304 /* To proceed, MPFR must exactly represent the target floating point
10305 format, which only happens when the target base equals two. */
10306 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10307 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10308 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10310 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10311 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10313 if (real_isfinite (ra0) && real_isfinite (ra1))
10315 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10316 const int prec = fmt->p;
10317 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10318 tree result_rem;
10319 long integer_quo;
10320 mpfr_t m0, m1;
10322 mpfr_inits2 (prec, m0, m1, NULL);
10323 mpfr_from_real (m0, ra0, GMP_RNDN);
10324 mpfr_from_real (m1, ra1, GMP_RNDN);
10325 mpfr_clear_flags ();
10326 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10327 /* Remquo is independent of the rounding mode, so pass
10328 inexact=0 to do_mpfr_ckconv(). */
10329 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10330 mpfr_clears (m0, m1, NULL);
10331 if (result_rem)
10333 /* MPFR calculates quo in the host's long so it may
10334 return more bits in quo than the target int can hold
10335 if sizeof(host long) > sizeof(target int). This can
10336 happen even for native compilers in LP64 mode. In
10337 these cases, modulo the quo value with the largest
10338 number that the target int can hold while leaving one
10339 bit for the sign. */
10340 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10341 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10343 /* Dereference the quo pointer argument. */
10344 arg_quo = build_fold_indirect_ref (arg_quo);
10345 /* Proceed iff a valid pointer type was passed in. */
10346 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10348 /* Set the value. */
10349 tree result_quo
10350 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10351 build_int_cst (TREE_TYPE (arg_quo),
10352 integer_quo));
10353 TREE_SIDE_EFFECTS (result_quo) = 1;
10354 /* Combine the quo assignment with the rem. */
10355 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10356 result_quo, result_rem));
10361 return result;
10364 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10365 resulting value as a tree with type TYPE. The mpfr precision is
10366 set to the precision of TYPE. We assume that this mpfr function
10367 returns zero if the result could be calculated exactly within the
10368 requested precision. In addition, the integer pointer represented
10369 by ARG_SG will be dereferenced and set to the appropriate signgam
10370 (-1,1) value. */
10372 static tree
10373 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10375 tree result = NULL_TREE;
10377 STRIP_NOPS (arg);
10379 /* To proceed, MPFR must exactly represent the target floating point
10380 format, which only happens when the target base equals two. Also
10381 verify ARG is a constant and that ARG_SG is an int pointer. */
10382 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10383 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10384 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10385 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10387 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10389 /* In addition to NaN and Inf, the argument cannot be zero or a
10390 negative integer. */
10391 if (real_isfinite (ra)
10392 && ra->cl != rvc_zero
10393 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10395 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10396 const int prec = fmt->p;
10397 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10398 int inexact, sg;
10399 mpfr_t m;
10400 tree result_lg;
10402 mpfr_init2 (m, prec);
10403 mpfr_from_real (m, ra, GMP_RNDN);
10404 mpfr_clear_flags ();
10405 inexact = mpfr_lgamma (m, &sg, m, rnd);
10406 result_lg = do_mpfr_ckconv (m, type, inexact);
10407 mpfr_clear (m);
10408 if (result_lg)
10410 tree result_sg;
10412 /* Dereference the arg_sg pointer argument. */
10413 arg_sg = build_fold_indirect_ref (arg_sg);
10414 /* Assign the signgam value into *arg_sg. */
10415 result_sg = fold_build2 (MODIFY_EXPR,
10416 TREE_TYPE (arg_sg), arg_sg,
10417 build_int_cst (TREE_TYPE (arg_sg), sg));
10418 TREE_SIDE_EFFECTS (result_sg) = 1;
10419 /* Combine the signgam assignment with the lgamma result. */
10420 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10421 result_sg, result_lg));
10426 return result;
10429 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10430 mpc function FUNC on it and return the resulting value as a tree
10431 with type TYPE. The mpfr precision is set to the precision of
10432 TYPE. We assume that function FUNC returns zero if the result
10433 could be calculated exactly within the requested precision. If
10434 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10435 in the arguments and/or results. */
10437 tree
10438 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10439 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10441 tree result = NULL_TREE;
10443 STRIP_NOPS (arg0);
10444 STRIP_NOPS (arg1);
10446 /* To proceed, MPFR must exactly represent the target floating point
10447 format, which only happens when the target base equals two. */
10448 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10449 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10450 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10451 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10452 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10454 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10455 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10456 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10457 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10459 if (do_nonfinite
10460 || (real_isfinite (re0) && real_isfinite (im0)
10461 && real_isfinite (re1) && real_isfinite (im1)))
10463 const struct real_format *const fmt =
10464 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10465 const int prec = fmt->p;
10466 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10467 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10468 int inexact;
10469 mpc_t m0, m1;
10471 mpc_init2 (m0, prec);
10472 mpc_init2 (m1, prec);
10473 mpfr_from_real (mpc_realref (m0), re0, rnd);
10474 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10475 mpfr_from_real (mpc_realref (m1), re1, rnd);
10476 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10477 mpfr_clear_flags ();
10478 inexact = func (m0, m0, m1, crnd);
10479 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10480 mpc_clear (m0);
10481 mpc_clear (m1);
10485 return result;
10488 /* A wrapper function for builtin folding that prevents warnings for
10489 "statement without effect" and the like, caused by removing the
10490 call node earlier than the warning is generated. */
10492 tree
10493 fold_call_stmt (gcall *stmt, bool ignore)
10495 tree ret = NULL_TREE;
10496 tree fndecl = gimple_call_fndecl (stmt);
10497 location_t loc = gimple_location (stmt);
10498 if (fndecl
10499 && TREE_CODE (fndecl) == FUNCTION_DECL
10500 && DECL_BUILT_IN (fndecl)
10501 && !gimple_call_va_arg_pack_p (stmt))
10503 int nargs = gimple_call_num_args (stmt);
10504 tree *args = (nargs > 0
10505 ? gimple_call_arg_ptr (stmt, 0)
10506 : &error_mark_node);
10508 if (avoid_folding_inline_builtin (fndecl))
10509 return NULL_TREE;
10510 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10512 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10514 else
10516 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10517 if (ret)
10519 /* Propagate location information from original call to
10520 expansion of builtin. Otherwise things like
10521 maybe_emit_chk_warning, that operate on the expansion
10522 of a builtin, will use the wrong location information. */
10523 if (gimple_has_location (stmt))
10525 tree realret = ret;
10526 if (TREE_CODE (ret) == NOP_EXPR)
10527 realret = TREE_OPERAND (ret, 0);
10528 if (CAN_HAVE_LOCATION_P (realret)
10529 && !EXPR_HAS_LOCATION (realret))
10530 SET_EXPR_LOCATION (realret, loc);
10531 return realret;
10533 return ret;
10537 return NULL_TREE;
10540 /* Look up the function in builtin_decl that corresponds to DECL
10541 and set ASMSPEC as its user assembler name. DECL must be a
10542 function decl that declares a builtin. */
10544 void
10545 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10547 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10548 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10549 && asmspec != 0);
10551 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10552 set_user_assembler_name (builtin, asmspec);
10554 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10555 && INT_TYPE_SIZE < BITS_PER_WORD)
10557 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10558 set_user_assembler_libfunc ("ffs", asmspec);
10559 set_optab_libfunc (ffs_optab, mode, "ffs");
10563 /* Return true if DECL is a builtin that expands to a constant or similarly
10564 simple code. */
10565 bool
10566 is_simple_builtin (tree decl)
10568 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10569 switch (DECL_FUNCTION_CODE (decl))
10571 /* Builtins that expand to constants. */
10572 case BUILT_IN_CONSTANT_P:
10573 case BUILT_IN_EXPECT:
10574 case BUILT_IN_OBJECT_SIZE:
10575 case BUILT_IN_UNREACHABLE:
10576 /* Simple register moves or loads from stack. */
10577 case BUILT_IN_ASSUME_ALIGNED:
10578 case BUILT_IN_RETURN_ADDRESS:
10579 case BUILT_IN_EXTRACT_RETURN_ADDR:
10580 case BUILT_IN_FROB_RETURN_ADDR:
10581 case BUILT_IN_RETURN:
10582 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10583 case BUILT_IN_FRAME_ADDRESS:
10584 case BUILT_IN_VA_END:
10585 case BUILT_IN_STACK_SAVE:
10586 case BUILT_IN_STACK_RESTORE:
10587 /* Exception state returns or moves registers around. */
10588 case BUILT_IN_EH_FILTER:
10589 case BUILT_IN_EH_POINTER:
10590 case BUILT_IN_EH_COPY_VALUES:
10591 return true;
10593 default:
10594 return false;
10597 return false;
10600 /* Return true if DECL is a builtin that is not expensive, i.e. one that
10601 will most probably be expanded inline into reasonably simple code. This
10602 is a superset of is_simple_builtin. */
10603 bool
10604 is_inexpensive_builtin (tree decl)
10606 if (!decl)
10607 return false;
10608 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10609 return true;
10610 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10611 switch (DECL_FUNCTION_CODE (decl))
10613 case BUILT_IN_ABS:
10614 CASE_BUILT_IN_ALLOCA:
10615 case BUILT_IN_BSWAP16:
10616 case BUILT_IN_BSWAP32:
10617 case BUILT_IN_BSWAP64:
10618 case BUILT_IN_CLZ:
10619 case BUILT_IN_CLZIMAX:
10620 case BUILT_IN_CLZL:
10621 case BUILT_IN_CLZLL:
10622 case BUILT_IN_CTZ:
10623 case BUILT_IN_CTZIMAX:
10624 case BUILT_IN_CTZL:
10625 case BUILT_IN_CTZLL:
10626 case BUILT_IN_FFS:
10627 case BUILT_IN_FFSIMAX:
10628 case BUILT_IN_FFSL:
10629 case BUILT_IN_FFSLL:
10630 case BUILT_IN_IMAXABS:
10631 case BUILT_IN_FINITE:
10632 case BUILT_IN_FINITEF:
10633 case BUILT_IN_FINITEL:
10634 case BUILT_IN_FINITED32:
10635 case BUILT_IN_FINITED64:
10636 case BUILT_IN_FINITED128:
10637 case BUILT_IN_FPCLASSIFY:
10638 case BUILT_IN_ISFINITE:
10639 case BUILT_IN_ISINF_SIGN:
10640 case BUILT_IN_ISINF:
10641 case BUILT_IN_ISINFF:
10642 case BUILT_IN_ISINFL:
10643 case BUILT_IN_ISINFD32:
10644 case BUILT_IN_ISINFD64:
10645 case BUILT_IN_ISINFD128:
10646 case BUILT_IN_ISNAN:
10647 case BUILT_IN_ISNANF:
10648 case BUILT_IN_ISNANL:
10649 case BUILT_IN_ISNAND32:
10650 case BUILT_IN_ISNAND64:
10651 case BUILT_IN_ISNAND128:
10652 case BUILT_IN_ISNORMAL:
10653 case BUILT_IN_ISGREATER:
10654 case BUILT_IN_ISGREATEREQUAL:
10655 case BUILT_IN_ISLESS:
10656 case BUILT_IN_ISLESSEQUAL:
10657 case BUILT_IN_ISLESSGREATER:
10658 case BUILT_IN_ISUNORDERED:
10659 case BUILT_IN_VA_ARG_PACK:
10660 case BUILT_IN_VA_ARG_PACK_LEN:
10661 case BUILT_IN_VA_COPY:
10662 case BUILT_IN_TRAP:
10663 case BUILT_IN_SAVEREGS:
10664 case BUILT_IN_POPCOUNTL:
10665 case BUILT_IN_POPCOUNTLL:
10666 case BUILT_IN_POPCOUNTIMAX:
10667 case BUILT_IN_POPCOUNT:
10668 case BUILT_IN_PARITYL:
10669 case BUILT_IN_PARITYLL:
10670 case BUILT_IN_PARITYIMAX:
10671 case BUILT_IN_PARITY:
10672 case BUILT_IN_LABS:
10673 case BUILT_IN_LLABS:
10674 case BUILT_IN_PREFETCH:
10675 case BUILT_IN_ACC_ON_DEVICE:
10676 return true;
10678 default:
10679 return is_simple_builtin (decl);
10682 return false;
10685 /* Return true if T is a constant and the value cast to a target char
10686 can be represented by a host char.
10687 Store the cast char constant in *P if so. */
10689 bool
10690 target_char_cst_p (tree t, char *p)
10692 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10693 return false;
10695 *p = (char)tree_to_uhwi (t);
10696 return true;
10699 /* Return the maximum object size. */
10701 tree
10702 max_object_size (void)
10704 /* To do: Make this a configurable parameter. */
10705 return TYPE_MAX_VALUE (ptrdiff_type_node);